Sep 29 22:26:34 crc systemd[1]: Starting Kubernetes Kubelet... Sep 29 22:26:34 crc restorecon[4732]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc 
restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 22:26:34 crc 
restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:34 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc 
restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc 
restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 
crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 
22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 
22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 22:26:35 crc 
restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc 
restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 22:26:35 crc restorecon[4732]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 22:26:36 crc kubenswrapper[4922]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 22:26:36 crc kubenswrapper[4922]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 22:26:36 crc kubenswrapper[4922]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 22:26:36 crc kubenswrapper[4922]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 22:26:36 crc kubenswrapper[4922]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Sep 29 22:26:36 crc kubenswrapper[4922]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.139426 4922 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148111 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148174 4922 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148184 4922 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148193 4922 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148204 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148215 4922 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148225 4922 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148233 4922 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148242 4922 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148251 4922 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148259 4922 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148268 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148276 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148284 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148293 4922 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148303 4922 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148312 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148321 4922 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148330 4922 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148338 4922 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148346 4922 feature_gate.go:330] unrecognized feature 
gate: EtcdBackendQuota Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148359 4922 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148370 4922 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148382 4922 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148449 4922 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148464 4922 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148475 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148484 4922 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148493 4922 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148503 4922 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148512 4922 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148521 4922 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148531 4922 feature_gate.go:330] unrecognized feature gate: Example Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.148552 4922 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149459 4922 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149480 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149492 4922 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149503 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149516 4922 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149527 4922 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149536 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149545 4922 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149556 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149564 4922 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149573 4922 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149583 4922 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 22:26:36 crc 
kubenswrapper[4922]: W0929 22:26:36.149592 4922 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149600 4922 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149609 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149617 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149625 4922 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149634 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149643 4922 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149651 4922 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149662 4922 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149674 4922 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149684 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149693 4922 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149702 4922 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149711 4922 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149720 4922 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149729 4922 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149739 4922 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149748 4922 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149757 4922 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149771 4922 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149782 4922 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149793 4922 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149804 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149813 4922 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.149824 4922 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150005 4922 flags.go:64] FLAG: --address="0.0.0.0" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150024 4922 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150041 4922 flags.go:64] FLAG: --anonymous-auth="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150054 4922 flags.go:64] FLAG: --application-metrics-count-limit="100" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150066 4922 flags.go:64] FLAG: --authentication-token-webhook="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150077 4922 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150091 4922 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150104 4922 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150115 4922 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150125 4922 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150137 4922 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150148 4922 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150158 4922 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150168 4922 flags.go:64] FLAG: --cgroup-root="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150178 4922 flags.go:64] FLAG: --cgroups-per-qos="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150187 4922 flags.go:64] FLAG: --client-ca-file="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150197 4922 flags.go:64] FLAG: --cloud-config="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150206 4922 flags.go:64] FLAG: --cloud-provider="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150216 4922 flags.go:64] FLAG: --cluster-dns="[]" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150228 4922 flags.go:64] FLAG: --cluster-domain="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150238 4922 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150248 4922 flags.go:64] FLAG: --config-dir="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150258 4922 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150268 4922 flags.go:64] FLAG: 
--container-log-max-files="5" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150280 4922 flags.go:64] FLAG: --container-log-max-size="10Mi" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150290 4922 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150300 4922 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150311 4922 flags.go:64] FLAG: --containerd-namespace="k8s.io" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150321 4922 flags.go:64] FLAG: --contention-profiling="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150330 4922 flags.go:64] FLAG: --cpu-cfs-quota="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150341 4922 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150355 4922 flags.go:64] FLAG: --cpu-manager-policy="none" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150366 4922 flags.go:64] FLAG: --cpu-manager-policy-options="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150378 4922 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150416 4922 flags.go:64] FLAG: --enable-controller-attach-detach="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150426 4922 flags.go:64] FLAG: --enable-debugging-handlers="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150436 4922 flags.go:64] FLAG: --enable-load-reader="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150447 4922 flags.go:64] FLAG: --enable-server="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150456 4922 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150471 4922 flags.go:64] FLAG: --event-burst="100" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150481 4922 flags.go:64] FLAG: --event-qps="50" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150491 4922 flags.go:64] FLAG: --event-storage-age-limit="default=0" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150501 4922 flags.go:64] FLAG: --event-storage-event-limit="default=0" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150511 4922 flags.go:64] FLAG: --eviction-hard="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150522 4922 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150532 4922 flags.go:64] FLAG: --eviction-minimum-reclaim="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150542 4922 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150554 4922 flags.go:64] FLAG: --eviction-soft="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150564 4922 flags.go:64] FLAG: --eviction-soft-grace-period="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150573 4922 flags.go:64] FLAG: --exit-on-lock-contention="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150583 4922 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150593 4922 flags.go:64] FLAG: --experimental-mounter-path="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150602 4922 flags.go:64] FLAG: --fail-cgroupv1="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 
22:26:36.150612 4922 flags.go:64] FLAG: --fail-swap-on="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150621 4922 flags.go:64] FLAG: --feature-gates="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150633 4922 flags.go:64] FLAG: --file-check-frequency="20s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150643 4922 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150653 4922 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150663 4922 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150674 4922 flags.go:64] FLAG: --healthz-port="10248" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150684 4922 flags.go:64] FLAG: --help="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150695 4922 flags.go:64] FLAG: --hostname-override="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150704 4922 flags.go:64] FLAG: --housekeeping-interval="10s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150715 4922 flags.go:64] FLAG: --http-check-frequency="20s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150725 4922 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150734 4922 flags.go:64] FLAG: --image-credential-provider-config="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150744 4922 flags.go:64] FLAG: --image-gc-high-threshold="85" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150754 4922 flags.go:64] FLAG: --image-gc-low-threshold="80" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150764 4922 flags.go:64] FLAG: --image-service-endpoint="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150774 4922 flags.go:64] FLAG: --kernel-memcg-notification="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150783 4922 flags.go:64] FLAG: --kube-api-burst="100" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150793 4922 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150803 4922 flags.go:64] FLAG: --kube-api-qps="50" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150815 4922 flags.go:64] FLAG: --kube-reserved="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150825 4922 flags.go:64] FLAG: --kube-reserved-cgroup="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150834 4922 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150844 4922 flags.go:64] FLAG: --kubelet-cgroups="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150854 4922 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150863 4922 flags.go:64] FLAG: --lock-file="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150873 4922 flags.go:64] FLAG: --log-cadvisor-usage="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150883 4922 flags.go:64] FLAG: --log-flush-frequency="5s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150892 4922 flags.go:64] FLAG: --log-json-info-buffer-size="0" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150907 4922 flags.go:64] FLAG: --log-json-split-stream="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150918 4922 flags.go:64] FLAG: 
--log-text-info-buffer-size="0" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150928 4922 flags.go:64] FLAG: --log-text-split-stream="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150938 4922 flags.go:64] FLAG: --logging-format="text" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150948 4922 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150959 4922 flags.go:64] FLAG: --make-iptables-util-chains="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.150999 4922 flags.go:64] FLAG: --manifest-url="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151009 4922 flags.go:64] FLAG: --manifest-url-header="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151023 4922 flags.go:64] FLAG: --max-housekeeping-interval="15s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151033 4922 flags.go:64] FLAG: --max-open-files="1000000" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151045 4922 flags.go:64] FLAG: --max-pods="110" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151055 4922 flags.go:64] FLAG: --maximum-dead-containers="-1" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151066 4922 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151076 4922 flags.go:64] FLAG: --memory-manager-policy="None" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151085 4922 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151097 4922 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151107 4922 flags.go:64] FLAG: --node-ip="192.168.126.11" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151118 4922 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151140 4922 flags.go:64] FLAG: --node-status-max-images="50" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151151 4922 flags.go:64] FLAG: --node-status-update-frequency="10s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151161 4922 flags.go:64] FLAG: --oom-score-adj="-999" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151171 4922 flags.go:64] FLAG: --pod-cidr="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151181 4922 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151198 4922 flags.go:64] FLAG: --pod-manifest-path="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151207 4922 flags.go:64] FLAG: --pod-max-pids="-1" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151218 4922 flags.go:64] FLAG: --pods-per-core="0" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151227 4922 flags.go:64] FLAG: --port="10250" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151238 4922 flags.go:64] FLAG: --protect-kernel-defaults="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151248 4922 flags.go:64] FLAG: --provider-id="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151257 4922 flags.go:64] FLAG: --qos-reserved="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151267 4922 flags.go:64] FLAG: 
--read-only-port="10255" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151277 4922 flags.go:64] FLAG: --register-node="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151286 4922 flags.go:64] FLAG: --register-schedulable="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151297 4922 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151312 4922 flags.go:64] FLAG: --registry-burst="10" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151322 4922 flags.go:64] FLAG: --registry-qps="5" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151331 4922 flags.go:64] FLAG: --reserved-cpus="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151342 4922 flags.go:64] FLAG: --reserved-memory="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151355 4922 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151365 4922 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151375 4922 flags.go:64] FLAG: --rotate-certificates="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151385 4922 flags.go:64] FLAG: --rotate-server-certificates="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151429 4922 flags.go:64] FLAG: --runonce="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151443 4922 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151457 4922 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151470 4922 flags.go:64] FLAG: --seccomp-default="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151479 4922 flags.go:64] FLAG: --serialize-image-pulls="true" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151490 4922 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151501 4922 flags.go:64] FLAG: --storage-driver-db="cadvisor" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151512 4922 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151522 4922 flags.go:64] FLAG: --storage-driver-password="root" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151532 4922 flags.go:64] FLAG: --storage-driver-secure="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151542 4922 flags.go:64] FLAG: --storage-driver-table="stats" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151552 4922 flags.go:64] FLAG: --storage-driver-user="root" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151562 4922 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151573 4922 flags.go:64] FLAG: --sync-frequency="1m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151582 4922 flags.go:64] FLAG: --system-cgroups="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151592 4922 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151608 4922 flags.go:64] FLAG: --system-reserved-cgroup="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151618 4922 flags.go:64] FLAG: --tls-cert-file="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151627 4922 flags.go:64] 
FLAG: --tls-cipher-suites="[]" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151639 4922 flags.go:64] FLAG: --tls-min-version="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151649 4922 flags.go:64] FLAG: --tls-private-key-file="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151659 4922 flags.go:64] FLAG: --topology-manager-policy="none" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151669 4922 flags.go:64] FLAG: --topology-manager-policy-options="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151679 4922 flags.go:64] FLAG: --topology-manager-scope="container" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151689 4922 flags.go:64] FLAG: --v="2" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151701 4922 flags.go:64] FLAG: --version="false" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151714 4922 flags.go:64] FLAG: --vmodule="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151727 4922 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.151737 4922 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152034 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152045 4922 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152055 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152065 4922 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152076 4922 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152086 4922 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152102 4922 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152113 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152123 4922 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152132 4922 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152141 4922 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152150 4922 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152159 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152167 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152176 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152184 4922 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152193 4922 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: 
W0929 22:26:36.152201 4922 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152210 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152222 4922 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152234 4922 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152245 4922 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152254 4922 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152264 4922 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152274 4922 feature_gate.go:330] unrecognized feature gate: Example Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152282 4922 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152291 4922 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152299 4922 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152308 4922 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152316 4922 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152325 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152333 4922 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152342 4922 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152351 4922 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152359 4922 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152368 4922 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152376 4922 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152385 4922 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152455 4922 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152468 4922 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152481 4922 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152490 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152499 4922 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152507 4922 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152516 4922 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152525 4922 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152534 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152542 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152550 4922 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152559 4922 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152567 4922 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152576 4922 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152584 4922 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152593 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152601 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152610 4922 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152619 4922 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152630 4922 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152640 4922 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152650 4922 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152660 4922 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152669 4922 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152678 4922 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152686 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152695 4922 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152703 4922 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152712 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152720 4922 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152729 4922 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152737 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.152748 4922 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.152773 4922 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.165627 4922 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.166023 4922 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166177 4922 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166191 4922 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166199 4922 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166209 4922 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166217 4922 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166225 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166233 4922 feature_gate.go:330] unrecognized feature gate: 
AlibabaPlatform Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166242 4922 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166252 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166260 4922 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166268 4922 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166276 4922 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166284 4922 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166292 4922 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166300 4922 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166308 4922 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166315 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166323 4922 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166331 4922 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166339 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166350 4922 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166359 4922 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166368 4922 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166377 4922 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166385 4922 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166417 4922 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166425 4922 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166434 4922 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166442 4922 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166451 4922 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166459 4922 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166466 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166474 4922 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166481 4922 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166492 4922 feature_gate.go:330] unrecognized feature gate: Example Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166500 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166508 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166515 4922 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166526 4922 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166538 4922 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166548 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166558 4922 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166568 4922 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166577 4922 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166586 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166596 4922 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166604 4922 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166615 4922 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166624 4922 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166632 4922 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166642 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166650 4922 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166659 4922 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166667 4922 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166675 4922 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166683 4922 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166691 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166698 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166706 4922 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166714 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166722 4922 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166730 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166738 4922 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166747 4922 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166756 4922 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166764 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166772 4922 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166779 4922 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166787 4922 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166794 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.166803 4922 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.166817 4922 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167058 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167075 4922 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167085 4922 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167094 4922 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167102 4922 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167110 4922 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167117 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167125 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167133 4922 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167141 4922 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167148 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167156 4922 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167164 4922 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167171 4922 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167179 4922 
feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167187 4922 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167195 4922 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167205 4922 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167215 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167226 4922 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167236 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167244 4922 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167255 4922 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167265 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167274 4922 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167283 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167291 4922 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167300 4922 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167308 4922 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167316 4922 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167324 4922 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167332 4922 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167339 4922 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167347 4922 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167358 4922 feature_gate.go:330] unrecognized feature gate: NewOLM Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167367 4922 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167374 4922 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167382 4922 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167411 4922 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 
22:26:36.167420 4922 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167428 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167435 4922 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167443 4922 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167451 4922 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167460 4922 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167469 4922 feature_gate.go:330] unrecognized feature gate: PinnedImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167478 4922 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167486 4922 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167493 4922 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167501 4922 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167509 4922 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167517 4922 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167525 4922 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167535 4922 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167545 4922 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167554 4922 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167563 4922 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167572 4922 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167580 4922 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167588 4922 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167596 4922 feature_gate.go:330] unrecognized feature gate: Example Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167604 4922 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167612 4922 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167620 4922 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167628 4922 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167635 4922 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167643 4922 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167652 4922 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167660 4922 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167668 4922 feature_gate.go:330] unrecognized feature gate: SignatureStores Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.167677 4922 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.167691 4922 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.169519 4922 server.go:940] "Client rotation is on, will bootstrap in background" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.175795 4922 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.175942 4922 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.177663 4922 server.go:997] "Starting client certificate rotation" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.177720 4922 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.178665 4922 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-01 20:58:38.766270059 +0000 UTC Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.178804 4922 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1510h32m2.587471257s for next certificate rotation Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.208900 4922 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.211539 4922 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.231680 4922 log.go:25] "Validated CRI v1 runtime API" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.272073 4922 log.go:25] "Validated CRI v1 image API" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.274243 4922 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.281707 4922 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-22-21-56-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.281766 4922 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.314182 4922 manager.go:217] Machine: {Timestamp:2025-09-29 22:26:36.310384037 +0000 UTC m=+0.620672940 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:dedd0e07-aa25-477e-8ea0-1bf77e1043bf BootID:4ddd4882-ce71-4215-8ae4-d2eabf83bed6 Filesystems:[{Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 
Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:14:f6:8a Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:14:f6:8a Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a7:8c:5a Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a4:73:df Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:2e:db:d3 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:f9:46:b6 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:c6:00:b6 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:de:52:e5:bf:50:26 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:e6:ec:b0:ae:0e:f3 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.314675 4922 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.315028 4922 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.316512 4922 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.316856 4922 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.316918 4922 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.317255 4922 topology_manager.go:138] "Creating topology manager with none policy" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.317280 4922 container_manager_linux.go:303] "Creating device plugin manager" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.317843 4922 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.317888 4922 server.go:66] "Creating device plugin registration server" 
version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.318206 4922 state_mem.go:36] "Initialized new in-memory state store" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.318343 4922 server.go:1245] "Using root directory" path="/var/lib/kubelet" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.321972 4922 kubelet.go:418] "Attempting to sync node with API server" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.322008 4922 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.322033 4922 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.322058 4922 kubelet.go:324] "Adding apiserver pod source" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.322077 4922 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.327576 4922 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.328917 4922 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.329052 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.329056 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.329223 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.329249 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.330616 4922 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332628 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332672 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332687 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332705 4922 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332729 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332743 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332757 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332780 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332797 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332812 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332831 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.332846 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.335134 4922 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.335818 4922 server.go:1280] "Started kubelet" Sep 29 22:26:36 crc systemd[1]: Started Kubernetes Kubelet. Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.337998 4922 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.337969 4922 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.338045 4922 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.338661 4922 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.339865 4922 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.339926 4922 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.340535 4922 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 04:05:28.772581149 +0000 UTC Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.340612 4922 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2045h38m52.431974097s for next certificate rotation Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.340845 4922 volume_manager.go:287] "The desired_state_of_world populator starts" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.340871 4922 volume_manager.go:289] "Starting Kubelet Volume Manager" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.340990 4922 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.341044 4922 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" 
not found" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.349597 4922 server.go:460] "Adding debug handlers to kubelet server" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.350754 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.350917 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.350785 4922 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="200ms" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.353785 4922 factory.go:55] Registering systemd factory Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.353847 4922 factory.go:221] Registration of the systemd container factory successfully Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.353728 4922 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.227:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869e1406fdcaf28 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 22:26:36.335779624 +0000 UTC m=+0.646068477,LastTimestamp:2025-09-29 22:26:36.335779624 +0000 UTC m=+0.646068477,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.354998 4922 factory.go:153] Registering CRI-O factory Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.355114 4922 factory.go:221] Registration of the crio container factory successfully Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.355301 4922 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.355644 4922 factory.go:103] Registering Raw factory Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.355727 4922 manager.go:1196] Started watching for new ooms in manager Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.358517 4922 manager.go:319] Starting recovery of all containers Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.368947 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc 
kubenswrapper[4922]: I0929 22:26:36.369021 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369052 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369079 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369103 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369127 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369153 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369176 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369203 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369229 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369253 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.369279 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 
22:26:36.369309 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.371797 4922 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.371976 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372021 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372056 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372084 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372147 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372197 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372264 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372359 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372448 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372503 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372530 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372582 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372742 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372784 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372837 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372867 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372917 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.372945 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373146 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373225 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373259 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373285 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373311 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373336 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373361 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373387 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373451 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373477 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373503 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373529 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373555 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373579 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373604 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373631 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373664 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373690 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373718 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373790 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373825 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373864 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373892 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373922 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373949 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.373975 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374001 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374026 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374050 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374074 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374101 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374172 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374754 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374853 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374877 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374898 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374917 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374939 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374960 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374980 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.374998 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375017 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375036 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375055 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375074 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375136 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375156 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375175 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375216 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375237 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375287 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375306 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375326 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375351 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375379 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375488 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375526 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375556 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375588 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375623 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375650 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375678 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375706 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375732 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375840 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375869 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375896 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375922 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375949 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.375976 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376006 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376038 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376064 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376105 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376138 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376169 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376198 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376230 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376261 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" 
volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376289 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376336 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376365 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376434 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376466 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376494 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376520 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376550 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376575 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376603 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376629 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376654 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376679 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376750 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376790 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376819 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376845 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376886 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376911 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376934 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376964 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.376988 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377012 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377039 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377067 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377093 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377120 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377146 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377172 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377198 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377226 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377253 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377278 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377301 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377320 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377340 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377360 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377379 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377441 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377468 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377501 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377527 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377556 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377586 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377613 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377639 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377665 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377692 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377722 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377751 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377780 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377807 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377837 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377863 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377889 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377919 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377949 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.377975 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378001 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378027 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378052 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378167 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378252 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378285 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378313 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378348 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378377 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378438 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378463 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378488 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378517 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378543 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378570 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378600 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378626 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378645 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378665 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378688 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378707 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378727 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378745 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378766 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378787 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378808 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378828 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378846 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378865 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378882 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378901 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378919 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378938 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378957 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378975 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.378993 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.379012 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.379031 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.379050 4922 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.379072 4922 reconstruct.go:97] "Volume reconstruction finished" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.379111 4922 reconciler.go:26] "Reconciler: start to sync state" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.393312 4922 manager.go:324] Recovery completed Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.411924 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 
22:26:36.414485 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.414547 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.414566 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.415602 4922 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.415643 4922 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.415677 4922 state_mem.go:36] "Initialized new in-memory state store" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.416251 4922 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.419533 4922 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.420507 4922 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.420570 4922 kubelet.go:2335] "Starting kubelet main sync loop" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.420642 4922 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.424040 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.424109 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.438183 4922 policy_none.go:49] "None policy: Start" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.439034 4922 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.439083 4922 state_mem.go:35] "Initializing new in-memory state store" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.444197 4922 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.513687 4922 manager.go:334] "Starting Device Plugin manager" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.513802 4922 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.513824 4922 server.go:79] "Starting device plugin registration server" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.514441 4922 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.514530 4922 container_log_manager.go:189] 
"Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.515104 4922 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.515291 4922 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.515314 4922 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.520979 4922 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.521088 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.522458 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.522503 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.522536 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.522699 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.522925 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.522991 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.523901 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.523988 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.524008 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.524254 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.524337 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.524362 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.524376 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.524540 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.524595 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.525849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.525899 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.525865 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.525916 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.525941 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.525962 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.526165 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.526297 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.526383 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.527794 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.527828 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.527844 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.527907 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.527935 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.527955 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.528029 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.528200 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.528247 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.528464 4922 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529380 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529432 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529446 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529458 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529478 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529462 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529736 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.529767 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.530624 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.530665 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.530681 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.552251 4922 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="400ms" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.581999 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582058 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582093 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582125 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582158 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582188 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582262 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582323 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582363 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582484 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582528 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582599 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582671 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582871 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.582918 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.614927 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.616327 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.616385 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.616434 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.616472 4922 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.616966 4922 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.683941 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684016 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684054 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684083 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684113 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684143 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684171 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684203 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684234 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684264 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684294 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684324 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684352 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684381 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684444 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684540 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684595 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684614 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684658 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684545 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684610 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684716 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684703 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" 
(UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684774 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684807 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684839 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684854 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684879 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684915 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.684879 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.817817 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.819505 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.819554 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.819574 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.819608 4922 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 22:26:36 crc 
kubenswrapper[4922]: E0929 22:26:36.820186 4922 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.870876 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.883902 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.906620 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.931604 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-6509cfcc61c967f7094370060e54859ddf2d4b123ee5d4e86e87e475a38da90b WatchSource:0}: Error finding container 6509cfcc61c967f7094370060e54859ddf2d4b123ee5d4e86e87e475a38da90b: Status 404 returned error can't find the container with id 6509cfcc61c967f7094370060e54859ddf2d4b123ee5d4e86e87e475a38da90b Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.933232 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.933535 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-80249dc1273c43e5cd21f6366b80e2b8413c78ac46a737d16dbfa4a3cf8dcaf2 WatchSource:0}: Error finding container 80249dc1273c43e5cd21f6366b80e2b8413c78ac46a737d16dbfa4a3cf8dcaf2: Status 404 returned error can't find the container with id 80249dc1273c43e5cd21f6366b80e2b8413c78ac46a737d16dbfa4a3cf8dcaf2 Sep 29 22:26:36 crc kubenswrapper[4922]: I0929 22:26:36.941617 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.946252 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-58f09ccd36c9e67b2a5672de84a1c25d316a9b02977ac381eb2711ccbf8183f2 WatchSource:0}: Error finding container 58f09ccd36c9e67b2a5672de84a1c25d316a9b02977ac381eb2711ccbf8183f2: Status 404 returned error can't find the container with id 58f09ccd36c9e67b2a5672de84a1c25d316a9b02977ac381eb2711ccbf8183f2 Sep 29 22:26:36 crc kubenswrapper[4922]: E0929 22:26:36.953861 4922 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="800ms" Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.954515 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-7fc9cb26519494fc5d8a9ebd379b824cd3a7a892b0ad46f63b814786fbd46680 WatchSource:0}: Error finding container 7fc9cb26519494fc5d8a9ebd379b824cd3a7a892b0ad46f63b814786fbd46680: Status 404 returned error can't find the container with id 7fc9cb26519494fc5d8a9ebd379b824cd3a7a892b0ad46f63b814786fbd46680 Sep 29 22:26:36 crc kubenswrapper[4922]: W0929 22:26:36.969092 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-7353774dd5354fa2eb49d38a72300480779233f68190705be8cd2e6d8e549bdc WatchSource:0}: Error finding container 7353774dd5354fa2eb49d38a72300480779233f68190705be8cd2e6d8e549bdc: Status 404 returned error can't find the container with id 7353774dd5354fa2eb49d38a72300480779233f68190705be8cd2e6d8e549bdc Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.220719 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.222576 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.222633 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.222651 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.222686 4922 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 22:26:37 crc kubenswrapper[4922]: E0929 22:26:37.223061 4922 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.339371 4922 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:37 crc kubenswrapper[4922]: W0929 22:26:37.378342 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list 
*v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:37 crc kubenswrapper[4922]: E0929 22:26:37.378473 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.428877 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"58f09ccd36c9e67b2a5672de84a1c25d316a9b02977ac381eb2711ccbf8183f2"} Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.430119 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"80249dc1273c43e5cd21f6366b80e2b8413c78ac46a737d16dbfa4a3cf8dcaf2"} Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.431219 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6509cfcc61c967f7094370060e54859ddf2d4b123ee5d4e86e87e475a38da90b"} Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.432355 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7353774dd5354fa2eb49d38a72300480779233f68190705be8cd2e6d8e549bdc"} Sep 29 22:26:37 crc kubenswrapper[4922]: I0929 22:26:37.433492 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7fc9cb26519494fc5d8a9ebd379b824cd3a7a892b0ad46f63b814786fbd46680"} Sep 29 22:26:37 crc kubenswrapper[4922]: W0929 22:26:37.503105 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:37 crc kubenswrapper[4922]: E0929 22:26:37.503211 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:37 crc kubenswrapper[4922]: W0929 22:26:37.577243 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:37 crc kubenswrapper[4922]: E0929 22:26:37.577317 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get 
\"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:37 crc kubenswrapper[4922]: E0929 22:26:37.755362 4922 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="1.6s" Sep 29 22:26:37 crc kubenswrapper[4922]: W0929 22:26:37.768220 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:37 crc kubenswrapper[4922]: E0929 22:26:37.768285 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.023482 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.025419 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.025472 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.025488 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.025519 4922 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 22:26:38 crc kubenswrapper[4922]: E0929 22:26:38.025999 4922 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.339247 4922 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.440923 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"59f2833e0b012302abc794c848c58de633cc797a54da2bb70c064ae55b50de2b"} Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.440987 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.440924 4922 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="59f2833e0b012302abc794c848c58de633cc797a54da2bb70c064ae55b50de2b" exitCode=0 Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.442337 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:38 crc 
kubenswrapper[4922]: I0929 22:26:38.442381 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.442420 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.443971 4922 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="2a80db6bd74786a7c030f808abac6c28c4faf3413da41f493dc94112efd17477" exitCode=0 Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.444040 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"2a80db6bd74786a7c030f808abac6c28c4faf3413da41f493dc94112efd17477"} Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.444132 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.445762 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.445800 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.445817 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.447306 4922 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4" exitCode=0 Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.447353 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4"} Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.447491 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.448935 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.448983 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.449002 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.451922 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547"} Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.451989 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94"} Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 
22:26:38.452011 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf"} Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.454886 4922 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0" exitCode=0 Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.454961 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0"} Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.455017 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.456641 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.456689 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.456707 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.459494 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.460714 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.460770 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:38 crc kubenswrapper[4922]: I0929 22:26:38.460790 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.338997 4922 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:39 crc kubenswrapper[4922]: E0929 22:26:39.356840 4922 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.227:6443: connect: connection refused" interval="3.2s" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.458958 4922 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="7928c14929f6ac0e6db69da61764d723bfa899a0005937a88959a065abd0c203" exitCode=0 Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.459012 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"7928c14929f6ac0e6db69da61764d723bfa899a0005937a88959a065abd0c203"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.459122 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.460107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.460128 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.460137 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.464790 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"cb1e7c7a3d264d1eaa255f4024c807830977ca154ec3d9bd72361add2d61e3b9"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.465150 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.466774 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.466798 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.466806 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.467968 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.467962 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.468044 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.468065 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.469268 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.469320 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.469338 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.471842 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d"} Sep 29 22:26:39 
crc kubenswrapper[4922]: I0929 22:26:39.472009 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.474180 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.474221 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.474239 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.477183 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.477229 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.477244 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42"} Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.477257 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf"} Sep 29 22:26:39 crc kubenswrapper[4922]: W0929 22:26:39.600048 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:39 crc kubenswrapper[4922]: E0929 22:26:39.600173 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.626271 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.627673 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.627717 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.627734 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:39 crc kubenswrapper[4922]: I0929 22:26:39.627770 4922 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 22:26:39 crc kubenswrapper[4922]: E0929 22:26:39.628292 4922 
kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.227:6443: connect: connection refused" node="crc" Sep 29 22:26:39 crc kubenswrapper[4922]: W0929 22:26:39.645102 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.227:6443: connect: connection refused Sep 29 22:26:39 crc kubenswrapper[4922]: E0929 22:26:39.645251 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.227:6443: connect: connection refused" logger="UnhandledError" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.485266 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa"} Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.485437 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.486808 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.486871 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.486889 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.489084 4922 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6db035cfc66dc3c8b57ab97dc6d285871f3cdc4dbc5d25185b3338d131b3f5f3" exitCode=0 Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.489129 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6db035cfc66dc3c8b57ab97dc6d285871f3cdc4dbc5d25185b3338d131b3f5f3"} Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.489200 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.489272 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.489277 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.489298 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.489273 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491276 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491317 4922 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491335 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491419 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491451 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491471 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491424 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491468 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491530 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491661 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491657 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:40 crc kubenswrapper[4922]: I0929 22:26:40.491701 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.500168 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cc2c65b22b9e515334d1dd588b131ce84704ce419dfda829238bdd81118b306c"} Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.500237 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"26c0b7776106598368bfae6c48874eda0fcc7bc6e82097ab127c3c63b5752638"} Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.500252 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"da2d23fc5be3a8646b0a782bdc740ad534b1976c2797f94e268e5e8454fa0715"} Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.500323 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.500374 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.501909 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.501965 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.501982 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.629083 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.986078 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.986481 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.988656 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.988735 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:41 crc kubenswrapper[4922]: I0929 22:26:41.988765 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.508065 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8e1abc14e6a291ad7e2f615677779053dd5d1e9b0f26fb89af66ad1071a76678"} Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.508137 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e0fd6bcecb4fd27be186a30fe51a5e5c9982ef3916e226d873c07c6b6802d18e"} Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.508175 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.508180 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.509570 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.509619 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.509616 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.509633 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.509681 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.509745 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.829181 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.831775 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.832034 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" 
Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.832049 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.832093 4922 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 22:26:42 crc kubenswrapper[4922]: I0929 22:26:42.837219 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.515012 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.515054 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.516350 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.516416 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.516433 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.516574 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.516627 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.516648 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.986808 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.987203 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.990268 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.990334 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:43 crc kubenswrapper[4922]: I0929 22:26:43.990353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.517801 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.519105 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.519165 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.519188 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.571733 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.571873 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.573273 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.573334 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.573357 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.873739 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.873958 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.875844 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.875922 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:44 crc kubenswrapper[4922]: I0929 22:26:44.875947 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:46 crc kubenswrapper[4922]: E0929 22:26:46.528722 4922 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 22:26:46 crc kubenswrapper[4922]: I0929 22:26:46.987693 4922 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 22:26:46 crc kubenswrapper[4922]: I0929 22:26:46.987797 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.020094 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.020345 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.022330 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.022444 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.022472 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.993344 4922 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.993650 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.995489 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.995558 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:47 crc kubenswrapper[4922]: I0929 22:26:47.995577 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:48 crc kubenswrapper[4922]: I0929 22:26:48.002056 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:48 crc kubenswrapper[4922]: I0929 22:26:48.527243 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:48 crc kubenswrapper[4922]: I0929 22:26:48.528661 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:48 crc kubenswrapper[4922]: I0929 22:26:48.528726 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:48 crc kubenswrapper[4922]: I0929 22:26:48.528745 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:48 crc kubenswrapper[4922]: I0929 22:26:48.534298 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:48 crc kubenswrapper[4922]: I0929 22:26:48.897035 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:49 crc kubenswrapper[4922]: I0929 22:26:49.530330 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:49 crc kubenswrapper[4922]: I0929 22:26:49.531744 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:49 crc kubenswrapper[4922]: I0929 22:26:49.531792 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:49 crc kubenswrapper[4922]: I0929 22:26:49.531808 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:50 crc kubenswrapper[4922]: W0929 22:26:50.087908 4922 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.088040 4922 trace.go:236] Trace[1650531452]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 22:26:40.086) (total time: 10001ms): Sep 29 22:26:50 crc kubenswrapper[4922]: Trace[1650531452]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (22:26:50.087) Sep 29 22:26:50 crc 
kubenswrapper[4922]: Trace[1650531452]: [10.001891481s] [10.001891481s] END Sep 29 22:26:50 crc kubenswrapper[4922]: E0929 22:26:50.088074 4922 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.340154 4922 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.533098 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.535543 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.535614 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.535633 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.895151 4922 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.895236 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.904292 4922 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 22:26:50 crc kubenswrapper[4922]: I0929 22:26:50.904386 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 22:26:51 crc kubenswrapper[4922]: I0929 22:26:51.636470 4922 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]log ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]etcd ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/openshift.io-startkubeinformers ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok 
Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-apiserver-admission-initializer ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/openshift.io-api-request-count-filter ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/generic-apiserver-start-informers ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/priority-and-fairness-config-consumer ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/priority-and-fairness-filter ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/storage-object-count-tracker-hook ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-apiextensions-informers ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-apiextensions-controllers ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/crd-informer-synced ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-system-namespaces-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-cluster-authentication-info-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-legacy-token-tracking-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-service-ip-repair-controllers ok Sep 29 22:26:51 crc kubenswrapper[4922]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Sep 29 22:26:51 crc kubenswrapper[4922]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/priority-and-fairness-config-producer ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/bootstrap-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/start-kube-aggregator-informers ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/apiservice-status-local-available-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/apiservice-status-remote-available-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/apiservice-registration-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/apiservice-wait-for-first-sync ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/apiservice-discovery-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/kube-apiserver-autoregistration ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]autoregister-completion ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/apiservice-openapi-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: [+]poststarthook/apiservice-openapiv3-controller ok Sep 29 22:26:51 crc kubenswrapper[4922]: livez check failed Sep 29 22:26:51 crc kubenswrapper[4922]: I0929 22:26:51.636540 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.105760 4922 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.106611 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.108189 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.108242 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.108297 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.126209 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.542056 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.543308 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.543373 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:26:53 crc kubenswrapper[4922]: I0929 22:26:53.543426 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:26:54 crc kubenswrapper[4922]: I0929 22:26:54.697293 4922 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Sep 29 22:26:55 crc kubenswrapper[4922]: E0929 22:26:55.897118 4922 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 29 22:26:55 crc kubenswrapper[4922]: I0929 22:26:55.906751 4922 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 29 22:26:55 crc kubenswrapper[4922]: I0929 22:26:55.906936 4922 trace.go:236] Trace[16377461]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 22:26:45.541) (total time: 10365ms): Sep 29 22:26:55 crc kubenswrapper[4922]: Trace[16377461]: ---"Objects listed" error: 10365ms (22:26:55.906) Sep 29 22:26:55 crc kubenswrapper[4922]: Trace[16377461]: [10.365080167s] [10.365080167s] END Sep 29 22:26:55 crc kubenswrapper[4922]: I0929 22:26:55.907238 4922 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Sep 29 22:26:55 crc kubenswrapper[4922]: I0929 22:26:55.907379 4922 trace.go:236] Trace[1212521105]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 22:26:40.913) (total time: 14993ms): Sep 29 22:26:55 crc kubenswrapper[4922]: Trace[1212521105]: ---"Objects listed" error: 14993ms (22:26:55.907) Sep 29 22:26:55 crc kubenswrapper[4922]: Trace[1212521105]: [14.99345105s] [14.99345105s] END Sep 29 22:26:55 crc kubenswrapper[4922]: I0929 22:26:55.907445 4922 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Sep 29 22:26:55 crc kubenswrapper[4922]: I0929 22:26:55.907671 4922 trace.go:236] Trace[405915547]: "Reflector ListAndWatch" 
name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 22:26:44.341) (total time: 11566ms): Sep 29 22:26:55 crc kubenswrapper[4922]: Trace[405915547]: ---"Objects listed" error: 11566ms (22:26:55.907) Sep 29 22:26:55 crc kubenswrapper[4922]: Trace[405915547]: [11.566465306s] [11.566465306s] END Sep 29 22:26:55 crc kubenswrapper[4922]: I0929 22:26:55.907764 4922 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Sep 29 22:26:55 crc kubenswrapper[4922]: E0929 22:26:55.908216 4922 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.043076 4922 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:41794->192.168.126.11:17697: read: connection reset by peer" start-of-body= Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.043198 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:41794->192.168.126.11:17697: read: connection reset by peer" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.113104 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.119839 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.333373 4922 apiserver.go:52] "Watching apiserver" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.337607 4922 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.337918 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.338359 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.338469 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.338554 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.338605 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.338677 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.338803 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.339039 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.339218 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.339267 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.341776 4922 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.342040 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.343000 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.343077 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.343429 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.343469 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.343528 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.343670 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.345866 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.353078 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.387223 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408717 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408773 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408812 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408842 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408871 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408899 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408925 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408952 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.408979 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409007 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409039 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409071 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409100 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409127 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409217 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409248 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409256 4922 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409308 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409344 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409376 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409412 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409481 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409515 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409544 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409577 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409610 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409643 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409676 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409708 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409738 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409770 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409811 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409844 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409876 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409905 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409968 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410012 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410046 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410079 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410108 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410137 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410164 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410199 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410229 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410259 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410289 4922 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410321 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410351 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410382 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410419 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410470 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410551 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410728 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410787 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410834 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 
22:26:56.410874 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410925 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410959 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410991 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411023 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411055 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411087 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411207 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411246 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411277 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: 
I0929 22:26:56.411307 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411341 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411373 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411412 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411493 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411533 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411564 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411599 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411636 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411671 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: 
I0929 22:26:56.409466 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.412484 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.412725 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.412873 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.413032 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.413152 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:26:56.913097183 +0000 UTC m=+21.223386026 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.413216 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.413387 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.413600 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.413762 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.413759 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.414047 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.414531 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409579 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409706 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409825 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409953 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410014 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410191 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410236 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410342 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410456 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410669 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410846 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410871 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.410962 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411160 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411292 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411571 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411635 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.411808 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.415014 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.415385 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.414170 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.415588 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.415766 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.415956 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.409566 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.416199 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.416258 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.416235 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.416842 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.419528 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.419579 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.419603 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.419725 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.419852 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.419975 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.416109 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.420190 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.420436 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.420499 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.420685 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.420711 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.420788 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.420971 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421185 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421463 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421663 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421650 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421735 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421784 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421820 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421887 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421913 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421935 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421956 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.421977 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422000 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422023 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: 
\"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422050 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422073 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422125 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422152 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422173 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422197 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422219 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422241 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422266 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422317 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422370 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422411 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422440 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422470 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422496 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422518 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422561 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422581 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422603 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" 
(UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422653 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422681 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422758 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422781 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422805 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422826 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422847 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422871 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422894 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422926 4922 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.422981 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423014 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423048 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423081 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423108 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423131 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423159 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423182 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423203 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423226 4922 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423249 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423278 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423301 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423323 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423344 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423366 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423394 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423436 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423473 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423510 4922 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423539 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423562 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423584 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423607 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423631 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.423996 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424022 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424045 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424068 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424091 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424140 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424171 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424196 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424218 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424241 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424264 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424286 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424309 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424333 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424356 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424378 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424405 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424470 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424534 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424565 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424592 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424597 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424620 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424659 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424697 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424728 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424750 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424774 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424798 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424822 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424846 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424875 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424907 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424942 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.424975 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425012 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425048 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425080 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425241 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425321 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425411 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425348 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod 
"96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425485 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425529 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425601 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425637 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425677 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425699 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425717 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425819 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.425875 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426081 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426189 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426318 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426473 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426528 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426567 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426606 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426645 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: 
\"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426671 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426696 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426749 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426850 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427618 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427658 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427686 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427720 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427814 4922 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427848 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427874 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427903 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427929 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427960 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427989 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428015 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428097 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 
22:26:56.428116 4922 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428131 4922 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428146 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428159 4922 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428173 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428187 4922 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428201 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428214 4922 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428229 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428243 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428257 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428272 4922 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428286 4922 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428299 4922 
reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428315 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428328 4922 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428341 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428556 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428573 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428587 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428602 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428617 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428631 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428645 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428663 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428716 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428733 4922 
reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428751 4922 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428766 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428781 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428800 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428818 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428834 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428849 4922 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428866 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428881 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428897 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428913 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428928 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: 
\"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428942 4922 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428955 4922 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428968 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428981 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428995 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429010 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429024 4922 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429040 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429053 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429068 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429081 4922 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429095 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429109 4922 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429125 4922 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429137 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429152 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429167 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429184 4922 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429197 4922 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429210 4922 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429223 4922 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.433220 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426751 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426765 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426776 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.426814 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427246 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427361 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427788 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427836 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.427908 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428190 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428276 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428361 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428708 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.428899 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429249 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429282 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429296 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.429861 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.430051 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.430149 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.430154 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.437277 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.430247 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.430546 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.430853 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.430911 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431020 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431142 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431177 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431290 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431306 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431361 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431679 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431675 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.431727 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.437744 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.433256 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.434222 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.434661 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.434807 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.435023 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.437949 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.435443 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.435526 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.435821 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.435933 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.436087 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.436322 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.437674 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.438105 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:56.938084725 +0000 UTC m=+21.248373548 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.438271 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.438702 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.438829 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.439105 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.439157 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.439199 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:56.939173742 +0000 UTC m=+21.249462595 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.439137 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.439327 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.439702 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.439743 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.440194 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.440226 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.440600 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.441071 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.441611 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.441491 4922 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.443427 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.443963 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.444119 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.444731 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.444951 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.445155 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.446036 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.446248 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.446512 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.446338 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.447136 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.447621 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.447738 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.448618 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.450345 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.452422 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.452942 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.453080 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.453311 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.454462 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.454657 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.443741 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.455716 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.456126 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.456267 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.458111 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.458821 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.459213 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460608 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460638 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460683 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460673 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460733 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460756 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:56.960734539 +0000 UTC m=+21.271023362 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460765 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.460873 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:56.960835162 +0000 UTC m=+21.271124025 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.461537 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.463511 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.463655 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.463777 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.464632 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.464691 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.464695 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.464867 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.465963 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.467646 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.471741 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.472899 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.472912 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.473710 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.474120 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.478850 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.479022 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.479877 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.479906 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.480155 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.480556 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.480822 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.481014 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.481036 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.481242 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.481311 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.483887 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.484062 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.484165 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.484502 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.484589 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.484377 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.484738 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.485173 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.485345 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.485380 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.485408 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.485766 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.485904 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.486119 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.486203 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.486253 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.486677 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.486838 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.488803 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.489114 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.490339 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.490406 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.490548 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.490566 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.490727 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.491008 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.491245 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.491640 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.491886 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.492798 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.493644 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.496700 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.497312 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.497627 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.497800 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.500344 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.501368 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.502423 4922 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.503663 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.504190 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.507540 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.508281 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.509587 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.512055 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.518297 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.519107 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.519888 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.524267 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.524960 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.528752 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.529251 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.529883 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.530908 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.533928 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.533983 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534053 4922 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534066 4922 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534079 4922 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534091 4922 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534102 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534114 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534124 4922 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534135 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534146 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534158 4922 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 
22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534171 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534182 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534193 4922 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.534385 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.535202 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.535609 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.536078 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540100 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540106 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540161 4922 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540175 4922 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540240 4922 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540994 4922 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.541022 4922 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.541041 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.541057 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540131 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.540557 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542853 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542887 4922 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542901 4922 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542913 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542934 4922 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542944 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542956 4922 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 
crc kubenswrapper[4922]: I0929 22:26:56.542968 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542985 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.542995 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543005 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543017 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543027 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543037 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543047 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543060 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543069 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543079 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543088 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543102 4922 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 
crc kubenswrapper[4922]: I0929 22:26:56.543111 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543120 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543134 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543143 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543153 4922 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543163 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543182 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543192 4922 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543200 4922 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543209 4922 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543221 4922 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543229 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543239 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 
22:26:56.543247 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543259 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543267 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543275 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543287 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543296 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543304 4922 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543315 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543326 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543335 4922 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543344 4922 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543378 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543396 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543428 4922 reconciler_common.go:293] "Volume detached for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543439 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543449 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543463 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543472 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543481 4922 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543520 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543530 4922 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543538 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543547 4922 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543561 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543572 4922 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543598 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543608 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543623 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543631 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543641 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543674 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543685 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543693 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543702 4922 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543716 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543724 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543751 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543761 4922 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543777 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543794 4922 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543803 4922 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543830 4922 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543846 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543854 4922 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543862 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543874 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543884 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543912 4922 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543922 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543936 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543946 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543957 4922 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543965 4922 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.543996 4922 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544007 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544020 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544035 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544044 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544052 4922 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544079 4922 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544094 4922 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544103 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544111 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544120 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544136 4922 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544164 4922 reconciler_common.go:293] "Volume 
detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544174 4922 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544183 4922 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544196 4922 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544206 4922 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544214 4922 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544247 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544257 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544266 4922 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544278 4922 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544292 4922 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544302 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.544329 4922 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.546518 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.555978 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.558253 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.559360 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.559839 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.561609 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.562796 4922 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa" exitCode=255 Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.563608 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.565824 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.566470 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.567502 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.567941 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa"} Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.574493 4922 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.574901 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.582164 4922 scope.go:117] "RemoveContainer" containerID="1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.582270 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.593678 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.613940 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.626434 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.633893 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.638783 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.656550 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.658868 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.665022 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 22:26:56 crc kubenswrapper[4922]: W0929 22:26:56.667003 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-234c605a88c65510f08e69e1a73dd7a04b75bd263a950b2ae5355d733e12a2b1 WatchSource:0}: Error finding container 234c605a88c65510f08e69e1a73dd7a04b75bd263a950b2ae5355d733e12a2b1: Status 404 returned error can't find the container with id 234c605a88c65510f08e69e1a73dd7a04b75bd263a950b2ae5355d733e12a2b1 Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.668293 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-2
9T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.676589 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.678625 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: W0929 22:26:56.681843 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-fc3962353d5560cc2d0da97f8faaddab778b6965d08caa041aa4f597cfcd799a WatchSource:0}: Error finding container fc3962353d5560cc2d0da97f8faaddab778b6965d08caa041aa4f597cfcd799a: Status 404 returned error can't find the container with id fc3962353d5560cc2d0da97f8faaddab778b6965d08caa041aa4f597cfcd799a Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.694706 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.705283 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.724561 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.741538 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.754113 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.771210 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.786421 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29
T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.797213 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.807024 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.820810 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.828603 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.842116 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.851326 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.864158 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29
T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.884165 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.896080 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.911484 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.924298 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.948610 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.948687 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:56 crc kubenswrapper[4922]: I0929 22:26:56.948723 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.948820 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.948873 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:57.948856435 +0000 UTC m=+22.259145248 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.949204 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:26:57.949193434 +0000 UTC m=+22.259482247 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.949280 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:56 crc kubenswrapper[4922]: E0929 22:26:56.949314 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:57.949306157 +0000 UTC m=+22.259594970 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.049755 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.049801 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.049949 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.049963 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.049966 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.049999 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.050012 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.050063 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:58.050046835 +0000 UTC m=+22.360335648 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.049975 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.050136 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:58.050114037 +0000 UTC m=+22.360402930 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.214329 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-nwkv8"] Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.214633 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-pbnnm"] Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.214904 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.215228 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.217059 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.217236 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.217517 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.217603 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.217679 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.217710 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.218404 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.218931 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.245107 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea9
5bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.250572 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/347374f7-ade0-4434-b26d-db474c4413f9-mcd-auth-proxy-config\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.250642 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwfjx\" (UniqueName: \"kubernetes.io/projected/347374f7-ade0-4434-b26d-db474c4413f9-kube-api-access-qwfjx\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.250671 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gvjq\" (UniqueName: \"kubernetes.io/projected/d93672bf-e1a5-46d2-85af-4af1f765eb8d-kube-api-access-4gvjq\") pod \"node-resolver-nwkv8\" (UID: \"d93672bf-e1a5-46d2-85af-4af1f765eb8d\") " pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.250693 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/347374f7-ade0-4434-b26d-db474c4413f9-proxy-tls\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.250735 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d93672bf-e1a5-46d2-85af-4af1f765eb8d-hosts-file\") pod \"node-resolver-nwkv8\" (UID: \"d93672bf-e1a5-46d2-85af-4af1f765eb8d\") " pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.250756 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/347374f7-ade0-4434-b26d-db474c4413f9-rootfs\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.257656 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.266540 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.276047 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.282922 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.292869 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29
T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.302880 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.314405 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.328471 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.338994 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.351330 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwfjx\" (UniqueName: \"kubernetes.io/projected/347374f7-ade0-4434-b26d-db474c4413f9-kube-api-access-qwfjx\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.351602 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gvjq\" (UniqueName: \"kubernetes.io/projected/d93672bf-e1a5-46d2-85af-4af1f765eb8d-kube-api-access-4gvjq\") pod \"node-resolver-nwkv8\" (UID: \"d93672bf-e1a5-46d2-85af-4af1f765eb8d\") " pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.351674 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/347374f7-ade0-4434-b26d-db474c4413f9-proxy-tls\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.351749 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d93672bf-e1a5-46d2-85af-4af1f765eb8d-hosts-file\") pod \"node-resolver-nwkv8\" (UID: \"d93672bf-e1a5-46d2-85af-4af1f765eb8d\") " pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.351818 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/347374f7-ade0-4434-b26d-db474c4413f9-rootfs\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.351885 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/347374f7-ade0-4434-b26d-db474c4413f9-mcd-auth-proxy-config\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.352539 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d93672bf-e1a5-46d2-85af-4af1f765eb8d-hosts-file\") pod \"node-resolver-nwkv8\" (UID: \"d93672bf-e1a5-46d2-85af-4af1f765eb8d\") " pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.352600 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/347374f7-ade0-4434-b26d-db474c4413f9-rootfs\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.353590 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/347374f7-ade0-4434-b26d-db474c4413f9-mcd-auth-proxy-config\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.357553 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/347374f7-ade0-4434-b26d-db474c4413f9-proxy-tls\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.361305 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a320
1505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.373708 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwfjx\" (UniqueName: \"kubernetes.io/projected/347374f7-ade0-4434-b26d-db474c4413f9-kube-api-access-qwfjx\") pod \"machine-config-daemon-pbnnm\" (UID: \"347374f7-ade0-4434-b26d-db474c4413f9\") " pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.378721 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.389066 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gvjq\" (UniqueName: \"kubernetes.io/projected/d93672bf-e1a5-46d2-85af-4af1f765eb8d-kube-api-access-4gvjq\") pod \"node-resolver-nwkv8\" (UID: \"d93672bf-e1a5-46d2-85af-4af1f765eb8d\") " pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.393308 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.407168 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.415858 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.426062 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.435759 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.445750 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.453968 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.466681 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.526542 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.531456 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-nwkv8" Sep 29 22:26:57 crc kubenswrapper[4922]: W0929 22:26:57.549951 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod347374f7_ade0_4434_b26d_db474c4413f9.slice/crio-4fe847baabdedd7db66e03b46767ffdae316eeeb205ff57c71fb5e089e82c3ac WatchSource:0}: Error finding container 4fe847baabdedd7db66e03b46767ffdae316eeeb205ff57c71fb5e089e82c3ac: Status 404 returned error can't find the container with id 4fe847baabdedd7db66e03b46767ffdae316eeeb205ff57c71fb5e089e82c3ac Sep 29 22:26:57 crc kubenswrapper[4922]: W0929 22:26:57.552434 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd93672bf_e1a5_46d2_85af_4af1f765eb8d.slice/crio-81d679be1631e5267ffd9f23b942a9e01dbfa2bc83786c29e9f310015e54adaf WatchSource:0}: Error finding container 81d679be1631e5267ffd9f23b942a9e01dbfa2bc83786c29e9f310015e54adaf: Status 404 returned error can't find the container with id 81d679be1631e5267ffd9f23b942a9e01dbfa2bc83786c29e9f310015e54adaf Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.565988 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"fc3962353d5560cc2d0da97f8faaddab778b6965d08caa041aa4f597cfcd799a"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.568862 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"4fe847baabdedd7db66e03b46767ffdae316eeeb205ff57c71fb5e089e82c3ac"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.571176 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.571206 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.571216 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"a522f539d727973caf05a6fa4c4b4e038515c42839eaf67b11164f7b351ae88d"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.573035 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.573059 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"234c605a88c65510f08e69e1a73dd7a04b75bd263a950b2ae5355d733e12a2b1"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.574861 
4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.576372 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.576835 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.578469 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-nwkv8" event={"ID":"d93672bf-e1a5-46d2-85af-4af1f765eb8d","Type":"ContainerStarted","Data":"81d679be1631e5267ffd9f23b942a9e01dbfa2bc83786c29e9f310015e54adaf"} Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.580707 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.597469 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runni
ng\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.612169 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-wvnl9"] Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.614600 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-md9pf"] Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.614901 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.615230 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.617092 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.617284 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.617507 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.617650 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.618156 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.618750 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.618937 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.619459 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.646832 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver 
kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kub
e-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.654920 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-system-cni-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655202 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655284 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-k8s-cni-cncf-io\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655352 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-etc-kubernetes\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655437 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-cni-multus\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655536 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-socket-dir-parent\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655620 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-system-cni-dir\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655686 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-cni-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655747 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-cnibin\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655807 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-os-release\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655891 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-cnibin\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.655960 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-os-release\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656023 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-conf-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656087 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-netns\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656167 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xbwf\" (UniqueName: \"kubernetes.io/projected/95e270b3-769f-4de2-9beb-6d425d722986-kube-api-access-5xbwf\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656234 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-kubelet\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656337 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qfdm\" (UniqueName: \"kubernetes.io/projected/6edd2cff-7363-4e99-8cc3-3db297410bce-kube-api-access-7qfdm\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656410 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6edd2cff-7363-4e99-8cc3-3db297410bce-cni-binary-copy\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656521 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-cni-bin\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656627 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-hostroot\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656702 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-multus-certs\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656772 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-daemon-config\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656837 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/95e270b3-769f-4de2-9beb-6d425d722986-cni-binary-copy\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.656907 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/95e270b3-769f-4de2-9beb-6d425d722986-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.692572 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.725073 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.756762 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757335 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-system-cni-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757367 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-etc-kubernetes\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757389 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757407 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-k8s-cni-cncf-io\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757439 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-cni-multus\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757460 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-socket-dir-parent\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757519 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-system-cni-dir\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757527 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-etc-kubernetes\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757567 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-k8s-cni-cncf-io\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757542 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-cni-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757622 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-system-cni-dir\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757679 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-cnibin\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757686 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-cni-multus\") pod \"multus-md9pf\" (UID: 
\"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757715 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-os-release\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757710 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-socket-dir-parent\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757758 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-system-cni-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757736 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-os-release\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757783 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-cnibin\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757791 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-cni-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757821 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-conf-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757848 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-conf-dir\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757874 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-cnibin\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757902 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" 
(UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-netns\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757921 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-kubelet\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757955 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xbwf\" (UniqueName: \"kubernetes.io/projected/95e270b3-769f-4de2-9beb-6d425d722986-kube-api-access-5xbwf\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.757980 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qfdm\" (UniqueName: \"kubernetes.io/projected/6edd2cff-7363-4e99-8cc3-3db297410bce-kube-api-access-7qfdm\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758004 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-os-release\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758007 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-os-release\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758015 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6edd2cff-7363-4e99-8cc3-3db297410bce-cni-binary-copy\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758041 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-kubelet\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758065 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-cnibin\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758057 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-cni-bin\") pod \"multus-md9pf\" (UID: 
\"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758086 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-netns\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758088 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-hostroot\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758104 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-hostroot\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758116 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-multus-certs\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758127 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-var-lib-cni-bin\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758138 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-daemon-config\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758151 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6edd2cff-7363-4e99-8cc3-3db297410bce-host-run-multus-certs\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758156 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/95e270b3-769f-4de2-9beb-6d425d722986-cni-binary-copy\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758173 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/95e270b3-769f-4de2-9beb-6d425d722986-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758879 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/95e270b3-769f-4de2-9beb-6d425d722986-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758883 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6edd2cff-7363-4e99-8cc3-3db297410bce-cni-binary-copy\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.758970 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6edd2cff-7363-4e99-8cc3-3db297410bce-multus-daemon-config\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.759094 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/95e270b3-769f-4de2-9beb-6d425d722986-cni-binary-copy\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.759564 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/95e270b3-769f-4de2-9beb-6d425d722986-tuning-conf-dir\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.797241 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qfdm\" (UniqueName: \"kubernetes.io/projected/6edd2cff-7363-4e99-8cc3-3db297410bce-kube-api-access-7qfdm\") pod \"multus-md9pf\" (UID: \"6edd2cff-7363-4e99-8cc3-3db297410bce\") " pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.803930 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xbwf\" (UniqueName: \"kubernetes.io/projected/95e270b3-769f-4de2-9beb-6d425d722986-kube-api-access-5xbwf\") pod \"multus-additional-cni-plugins-wvnl9\" (UID: \"95e270b3-769f-4de2-9beb-6d425d722986\") " pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.807761 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.836760 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.854460 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.870921 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.882816 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.894101 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.905114 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.915533 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.927185 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.933013 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-md9pf" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.939435 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource
-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.941555 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.956254 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.959963 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.960090 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.960212 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.960218 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:26:59.960180122 +0000 UTC m=+24.270468975 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.960294 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.960387 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:59.960367527 +0000 UTC m=+24.270656340 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.960490 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: E0929 22:26:57.960553 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:26:59.960543871 +0000 UTC m=+24.270832894 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:57 crc kubenswrapper[4922]: I0929 22:26:57.985752 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:57Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.026582 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tqsst"] Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.029274 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.032125 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.038010 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.055933 4922 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063358 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovn-node-metrics-cert\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063398 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-slash\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063438 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063457 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-bin\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063523 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-script-lib\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.063616 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.063644 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063652 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-ovn-kubernetes\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063683 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-var-lib-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063702 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063721 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-config\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.063660 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063760 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063780 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-netns\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.063802 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:00.063785712 +0000 UTC m=+24.374074515 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063822 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-netd\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063843 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-env-overrides\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063870 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjrw4\" (UniqueName: \"kubernetes.io/projected/ef991319-1ee8-4778-8567-9b4e8ff7600c-kube-api-access-zjrw4\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063890 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-systemd-units\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063907 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-ovn\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063927 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-log-socket\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063945 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-kubelet\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063971 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-systemd\") pod \"ovnkube-node-tqsst\" (UID: 
\"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.063987 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-node-log\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.064004 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-etc-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.064018 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.064173 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.064188 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.064198 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.064237 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:00.064223663 +0000 UTC m=+24.374512476 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.075794 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.096778 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.116216 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.138299 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.155553 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164417 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-ovn-kubernetes\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164457 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-bin\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164492 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-script-lib\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164530 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-var-lib-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164553 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164575 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-config\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164598 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-netns\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164597 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-ovn-kubernetes\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164666 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-netd\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164617 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-netd\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164702 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-var-lib-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164727 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-env-overrides\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164752 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-bin\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164764 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjrw4\" (UniqueName: \"kubernetes.io/projected/ef991319-1ee8-4778-8567-9b4e8ff7600c-kube-api-access-zjrw4\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164796 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-systemd-units\") pod 
\"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164819 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-ovn\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164841 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-log-socket\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164867 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-kubelet\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164893 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-systemd\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164918 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-node-log\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164942 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-etc-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.164963 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165001 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovn-node-metrics-cert\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165032 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-slash\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 
22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165092 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-slash\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165127 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165856 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-script-lib\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165904 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-netns\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165943 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-kubelet\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165980 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-systemd\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.166020 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-node-log\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.166053 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-etc-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.166086 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-openvswitch\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.166314 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-env-overrides\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.166544 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-log-socket\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.165873 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-config\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.166578 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-ovn\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.166623 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-systemd-units\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.169840 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovn-node-metrics-cert\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.212385 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjrw4\" (UniqueName: \"kubernetes.io/projected/ef991319-1ee8-4778-8567-9b4e8ff7600c-kube-api-access-zjrw4\") pod \"ovnkube-node-tqsst\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.232527 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.265097 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.304553 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.344174 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.352801 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: W0929 22:26:58.355175 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef991319_1ee8_4778_8567_9b4e8ff7600c.slice/crio-875e8ea62473009c543bb017a8284b82ff45aae737a659d7fbf6143b33d2fa38 WatchSource:0}: Error finding container 875e8ea62473009c543bb017a8284b82ff45aae737a659d7fbf6143b33d2fa38: Status 404 returned error can't find the container with id 875e8ea62473009c543bb017a8284b82ff45aae737a659d7fbf6143b33d2fa38 Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.403198 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.421797 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.421943 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.422081 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.422119 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.422253 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:26:58 crc kubenswrapper[4922]: E0929 22:26:58.422373 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.426371 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.427060 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.427742 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.428411 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.429021 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.429528 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.430101 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.430669 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.431308 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.431817 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.432298 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.434507 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" 
path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.435230 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.436559 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.437386 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.446447 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.479699 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.508686 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.543177 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.581582 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.581641 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.582875 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerStarted","Data":"2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.582906 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerStarted","Data":"4e1ce4c61c48bf1bdd78bc43cf39171144427845222d393b3f2b8869b634dcad"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.584138 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-nwkv8" event={"ID":"d93672bf-e1a5-46d2-85af-4af1f765eb8d","Type":"ContainerStarted","Data":"c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.585299 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.585350 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"875e8ea62473009c543bb017a8284b82ff45aae737a659d7fbf6143b33d2fa38"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.586300 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerStarted","Data":"27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.586323 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerStarted","Data":"3f20e8ef11ba71364f5764eca7f647f0bb18b9cf230a3aeebf1c19fb51052eb5"} Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.587829 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.624660 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.663644 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.705591 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.752791 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: 
I0929 22:26:58.785314 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.824244 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.869740 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.909844 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.951524 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:58 crc kubenswrapper[4922]: I0929 22:26:58.983559 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:58Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.027163 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.063386 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.106832 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.145479 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.205140 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.229786 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.269884 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.308058 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.344590 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.592662 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15"} Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.595514 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f" exitCode=0 Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.595600 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.598135 4922 generic.go:334] "Generic (PLEG): container finished" podID="95e270b3-769f-4de2-9beb-6d425d722986" containerID="27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4" exitCode=0 Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.598339 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerDied","Data":"27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4"} Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.609138 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.627699 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.646169 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.675347 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.694362 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.715614 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.732223 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.751515 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.767955 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.782493 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.796064 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.825685 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.868803 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.908226 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.951251 4922 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.970821 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-l2k7v"] Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.971331 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.986220 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.986383 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:59 crc kubenswrapper[4922]: E0929 22:26:59.986527 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:59 crc kubenswrapper[4922]: E0929 22:26:59.986520 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:27:03.986469334 +0000 UTC m=+28.296758147 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.986750 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:26:59 crc kubenswrapper[4922]: E0929 22:26:59.986872 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:03.986857884 +0000 UTC m=+28.297146937 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:26:59 crc kubenswrapper[4922]: E0929 22:26:59.987058 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:59 crc kubenswrapper[4922]: E0929 22:26:59.987133 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:03.987122651 +0000 UTC m=+28.297411714 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:26:59 crc kubenswrapper[4922]: I0929 22:26:59.988660 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:26:59Z is after 2025-08-24T17:21:41Z" Sep 29 22:26:59 crc 
kubenswrapper[4922]: I0929 22:26:59.995407 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.015745 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.035960 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.055749 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.088147 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.088185 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.088227 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/222f81de-5954-4c27-8d86-6281bc47901f-serviceca\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.088246 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/222f81de-5954-4c27-8d86-6281bc47901f-host\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.088263 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj86w\" (UniqueName: \"kubernetes.io/projected/222f81de-5954-4c27-8d86-6281bc47901f-kube-api-access-pj86w\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088306 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088326 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088337 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088348 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088358 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088365 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088377 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:04.088363292 +0000 UTC m=+28.398652105 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.088396 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:04.088384312 +0000 UTC m=+28.398673125 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.106265 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.143745 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.184901 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.189349 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/222f81de-5954-4c27-8d86-6281bc47901f-serviceca\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.189385 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/222f81de-5954-4c27-8d86-6281bc47901f-host\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.189428 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj86w\" (UniqueName: \"kubernetes.io/projected/222f81de-5954-4c27-8d86-6281bc47901f-kube-api-access-pj86w\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.189480 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/222f81de-5954-4c27-8d86-6281bc47901f-host\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.190165 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/222f81de-5954-4c27-8d86-6281bc47901f-serviceca\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.233440 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj86w\" (UniqueName: \"kubernetes.io/projected/222f81de-5954-4c27-8d86-6281bc47901f-kube-api-access-pj86w\") pod \"node-ca-l2k7v\" (UID: \"222f81de-5954-4c27-8d86-6281bc47901f\") " pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.246745 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.282864 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.294418 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-l2k7v" Sep 29 22:27:00 crc kubenswrapper[4922]: W0929 22:27:00.305543 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod222f81de_5954_4c27_8d86_6281bc47901f.slice/crio-af4741931d285e356c9dac77912e339184f3025f3afed188c4b4bf3b5cc8066a WatchSource:0}: Error finding container af4741931d285e356c9dac77912e339184f3025f3afed188c4b4bf3b5cc8066a: Status 404 returned error can't find the container with id af4741931d285e356c9dac77912e339184f3025f3afed188c4b4bf3b5cc8066a Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.322971 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.370719 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.413821 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.421611 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.421646 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.421697 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.421744 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.422092 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:00 crc kubenswrapper[4922]: E0929 22:27:00.422196 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.449104 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubern
etes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.482168 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.521701 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.571997 4922 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.605374 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.605520 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.605539 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.605554 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.605569 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.605548 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.605582 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.606927 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-l2k7v" event={"ID":"222f81de-5954-4c27-8d86-6281bc47901f","Type":"ContainerStarted","Data":"17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.606990 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-l2k7v" event={"ID":"222f81de-5954-4c27-8d86-6281bc47901f","Type":"ContainerStarted","Data":"af4741931d285e356c9dac77912e339184f3025f3afed188c4b4bf3b5cc8066a"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.608992 4922 generic.go:334] "Generic (PLEG): 
container finished" podID="95e270b3-769f-4de2-9beb-6d425d722986" containerID="f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b" exitCode=0 Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.609085 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerDied","Data":"f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b"} Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.648393 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.685113 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.723386 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.766790 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.808219 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.847023 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc 
kubenswrapper[4922]: I0929 22:27:00.889671 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.926605 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:00 crc kubenswrapper[4922]: I0929 22:27:00.971354 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:00Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.004777 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.052989 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.087169 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.130313 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.164991 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.204179 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.246708 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.287378 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.330749 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.369092 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.407650 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.448690 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.487714 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.540147 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z 
is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.565196 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.607295 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.616319 4922 generic.go:334] "Generic (PLEG): container finished" podID="95e270b3-769f-4de2-9beb-6d425d722986" containerID="767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd" exitCode=0 Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.616386 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerDied","Data":"767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd"} Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.647142 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.691235 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.726524 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.771561 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.810530 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.844257 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.885336 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.926453 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:01 crc kubenswrapper[4922]: I0929 22:27:01.964425 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:01Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.023953 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z 
is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.073931 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.103465 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.121619 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.161874 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.308509 4922 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.311002 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.311045 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.311058 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.311158 4922 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.319527 4922 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.319758 4922 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.320995 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.321031 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.321042 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.321059 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.321072 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.335435 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.339626 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.339667 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.339680 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.339696 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.339707 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.352115 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.355975 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.356009 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.356018 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.356033 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.356045 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.369701 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.374163 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.374219 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.374230 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.374253 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.374265 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.385823 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.391440 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.391489 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.391505 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.391527 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.391542 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.406381 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.406521 4922 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.408527 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.408549 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.408558 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.408574 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.408585 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.421868 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.421879 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.421937 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.421976 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.422114 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:02 crc kubenswrapper[4922]: E0929 22:27:02.422205 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.511997 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.512064 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.512084 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.512113 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.512133 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.615295 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.615351 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.615362 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.615390 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.615403 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.623152 4922 generic.go:334] "Generic (PLEG): container finished" podID="95e270b3-769f-4de2-9beb-6d425d722986" containerID="3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739" exitCode=0 Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.623200 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerDied","Data":"3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739"} Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.629181 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.642512 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.657115 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.675753 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.693461 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.713133 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.719194 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.719262 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.719306 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.719326 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.719360 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.726620 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.741852 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.755265 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.781320 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z 
is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.795238 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.804606 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.820520 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.822712 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.822759 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.822774 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.822798 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.822812 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.843090 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:
27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.856802 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:02Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.925216 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.925256 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.925266 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.925281 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:02 crc kubenswrapper[4922]: I0929 22:27:02.925293 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:02Z","lastTransitionTime":"2025-09-29T22:27:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.027575 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.027613 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.027738 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.027760 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.027781 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.131308 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.131356 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.131371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.131428 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.131447 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.234521 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.234572 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.234581 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.234602 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.234613 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.337749 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.337805 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.337815 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.337837 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.337850 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.440040 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.440117 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.440128 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.440149 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.440162 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.543999 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.544056 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.544075 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.544099 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.544119 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.638530 4922 generic.go:334] "Generic (PLEG): container finished" podID="95e270b3-769f-4de2-9beb-6d425d722986" containerID="32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df" exitCode=0 Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.638629 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerDied","Data":"32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.646787 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.646836 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.646854 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.646879 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.646897 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.655068 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.679075 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.699528 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.714454 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.737295 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.749913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.750053 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.750079 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.750110 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.750133 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.759722 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.780236 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.801673 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.828447 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.848916 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.853557 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.853768 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.853908 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.854046 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.854203 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.869030 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.901045 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.917704 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.937093 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:03Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.956747 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.956835 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.956857 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.956883 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:03 crc kubenswrapper[4922]: I0929 22:27:03.956901 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:03Z","lastTransitionTime":"2025-09-29T22:27:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.031052 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.031170 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.031221 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.031343 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.031417 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:12.031380989 +0000 UTC m=+36.341669802 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.031821 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:27:12.03181055 +0000 UTC m=+36.342099363 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.031864 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.031919 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:12.031892412 +0000 UTC m=+36.342181225 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.059242 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.059281 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.059293 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.059310 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.059321 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.131864 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.131949 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132133 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132185 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132204 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132151 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132302 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132325 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132281 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:12.132257402 +0000 UTC m=+36.442546225 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.132436 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:12.132402105 +0000 UTC m=+36.442690938 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.163073 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.163123 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.163136 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.163155 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.163168 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.267184 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.267299 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.267322 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.267357 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.267381 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.370829 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.370895 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.370912 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.370936 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.370954 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.421614 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.421568 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.421756 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.421870 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.422059 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:04 crc kubenswrapper[4922]: E0929 22:27:04.422159 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.474122 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.474242 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.474266 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.474297 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.474319 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.577107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.577144 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.577155 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.577172 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.577184 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.648844 4922 generic.go:334] "Generic (PLEG): container finished" podID="95e270b3-769f-4de2-9beb-6d425d722986" containerID="37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d" exitCode=0 Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.648953 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerDied","Data":"37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.661326 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.662612 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.662655 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.681062 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.681168 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.681188 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.681218 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.681238 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.683094 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.700434 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.701611 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.704289 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.731576 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.749483 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.768984 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.784676 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.785693 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.785765 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.785776 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.785796 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.785831 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.799788 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.812662 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.827169 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.856076 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.887823 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.887881 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.887896 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.887913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.887924 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.898680 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.923797 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.943143 4922 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.955024 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\
\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.969425 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@
sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.987311 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.989995 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.990023 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.990032 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.990046 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:04 crc kubenswrapper[4922]: I0929 22:27:04.990055 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:04Z","lastTransitionTime":"2025-09-29T22:27:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.003864 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.025154 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.040725 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.056150 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.075108 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.088608 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.092834 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.092871 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.092879 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.092892 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.092902 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.108672 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc920
6c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.128349 4922 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.142305 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.168286 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.185752 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.196975 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.197051 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.197072 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.197100 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.197119 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.200742 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.300555 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.300624 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.300643 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.300670 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.300688 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.403450 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.403512 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.403528 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.403552 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.403569 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.506992 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.507038 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.507055 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.507077 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.507094 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.611198 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.611675 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.611693 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.611720 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.611741 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.670310 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" event={"ID":"95e270b3-769f-4de2-9beb-6d425d722986","Type":"ContainerStarted","Data":"dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.670360 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.687357 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.710833 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.715640 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.715705 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.715724 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.715750 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.715769 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.735966 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.761222 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.786627 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.812694 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.818797 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.818865 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.818883 4922 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.818907 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.818927 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.833012 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.860709 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.880208 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.898060 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.922227 4922 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.922292 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.922308 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.922340 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.922359 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:05Z","lastTransitionTime":"2025-09-29T22:27:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.928496 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a6868
1f6bdd44cc073ccada88b74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.945098 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.966300 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:05 crc kubenswrapper[4922]: I0929 22:27:05.986856 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:05Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.025629 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.025732 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.025752 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.025775 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.025792 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.128757 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.128820 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.128839 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.128863 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.128891 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.231493 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.231561 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.231579 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.231785 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.231815 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.335365 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.335449 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.335474 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.335500 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.335523 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.421891 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.421975 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.424020 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:06 crc kubenswrapper[4922]: E0929 22:27:06.424195 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:06 crc kubenswrapper[4922]: E0929 22:27:06.423941 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:06 crc kubenswrapper[4922]: E0929 22:27:06.425097 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.438785 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.438881 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.438902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.438928 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.438946 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.447738 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.481362 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.501347 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.532333 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\"
,\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.541284 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.541387 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.541457 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.541483 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.541541 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.555619 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.575224 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.597314 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.626952 4922 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.645877 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.645927 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.645941 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc 
kubenswrapper[4922]: I0929 22:27:06.645966 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.645982 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.655784 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.674145 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:27:06 crc 
kubenswrapper[4922]: I0929 22:27:06.676764 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.694575 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.719646 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09
-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"po
dIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.742595 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.749293 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.749354 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.749371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.749425 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 
22:27:06.749446 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.763279 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.852758 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.852843 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.852878 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.852911 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.852934 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.955996 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.956048 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.956065 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.956090 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:06 crc kubenswrapper[4922]: I0929 22:27:06.956107 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:06Z","lastTransitionTime":"2025-09-29T22:27:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.060218 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.060285 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.060303 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.060332 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.060351 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.163940 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.164034 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.164053 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.164081 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.164101 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.267980 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.268042 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.268059 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.268085 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.268103 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.370953 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.371001 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.371017 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.371035 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.371046 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.473208 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.473276 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.473286 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.473307 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.473316 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.576050 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.576094 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.576105 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.576124 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.576143 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.682657 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.682732 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.682746 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.682770 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.682784 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.785448 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.785489 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.785498 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.785513 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.785523 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.887536 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.887573 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.887581 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.887595 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.887604 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.989873 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.989913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.989924 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.989940 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:07 crc kubenswrapper[4922]: I0929 22:27:07.989949 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:07Z","lastTransitionTime":"2025-09-29T22:27:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.091749 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.091785 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.091801 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.091819 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.091834 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.194297 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.194352 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.194364 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.194380 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.194415 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.296893 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.296956 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.296968 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.297005 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.297018 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.400043 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.400103 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.400121 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.400146 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.400166 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.421584 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.421606 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:08 crc kubenswrapper[4922]: E0929 22:27:08.421770 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.421813 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:08 crc kubenswrapper[4922]: E0929 22:27:08.421936 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:08 crc kubenswrapper[4922]: E0929 22:27:08.422036 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.459435 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.478780 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.503256 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.503325 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.503346 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.503371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.503416 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.513086 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.530425 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.550290 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.569629 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.584908 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.607184 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.607255 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.607271 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.607301 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.607326 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.611211 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.630898 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.656280 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.676620 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.690449 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/0.log" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.695995 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b" exitCode=1 Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.696050 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.697280 4922 scope.go:117] "RemoveContainer" containerID="390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.700577 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.710630 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.710698 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.710730 4922 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.710757 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.710778 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.722073 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.745512 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.767707 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.791923 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.810714 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.812843 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.812889 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.812902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.812922 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.812934 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.827653 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.842655 4922 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.861936 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.879257 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.900923 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.915467 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.915538 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.915551 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.915593 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.915607 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:08Z","lastTransitionTime":"2025-09-29T22:27:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.920028 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.933209 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.948552 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.961267 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.983211 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a6868
1f6bdd44cc073ccada88b74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"message\\\":\\\"go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:08.086797 6262 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:08.086811 6262 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 22:27:08.086823 6262 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 22:27:08.086834 6262 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 22:27:08.086848 6262 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:08.086876 6262 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:08.086897 6262 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:08.086937 6262 factory.go:656] Stopping watch factory\\\\nI0929 22:27:08.086973 6262 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:08.086997 6262 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:08.087019 6262 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:08.087021 6262 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 22:27:08.087035 6262 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:08.087052 6262 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:08 crc kubenswrapper[4922]: I0929 22:27:08.997345 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:08Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.014249 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.018509 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.018556 4922 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.018575 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.018597 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.018614 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.121595 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.121640 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.121652 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.121670 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.121684 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.224183 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.224230 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.224242 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.224280 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.224292 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.326802 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.326849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.326862 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.326879 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.326892 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.429655 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.429710 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.429727 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.429748 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.429765 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.532933 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.532991 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.533008 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.533032 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.533051 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.635963 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.636006 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.636016 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.636032 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.636044 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.701293 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/0.log" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.704366 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.704548 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.723752 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.738601 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.738643 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.738656 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.738676 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.738688 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.742027 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.758414 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.771859 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.788016 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.804278 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.821724 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.838453 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.841336 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.841426 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.841446 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.841471 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.841490 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.852561 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.875187 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"message\\\":\\\"go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:08.086797 6262 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:08.086811 6262 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 22:27:08.086823 6262 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 22:27:08.086834 6262 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 22:27:08.086848 6262 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:08.086876 6262 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:08.086897 6262 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:08.086937 6262 factory.go:656] Stopping watch factory\\\\nI0929 22:27:08.086973 6262 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:08.086997 6262 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:08.087019 6262 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:08.087021 6262 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 22:27:08.087035 6262 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:08.087052 6262 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.890050 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.907975 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.928161 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.945976 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.946046 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.946066 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.946097 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.946118 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:09Z","lastTransitionTime":"2025-09-29T22:27:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:09 crc kubenswrapper[4922]: I0929 22:27:09.948802 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:09Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.049570 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.049665 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.049685 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.049711 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.049731 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.152377 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.152462 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.152481 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.152504 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.152521 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.255478 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.255531 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.255548 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.255572 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.255592 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.328448 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx"] Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.329133 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.331485 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.332832 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.346436 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.358717 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.358792 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.358816 
4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.358849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.358876 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.372530 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode
\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.393555 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.407146 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/09d9860a-bccf-4df5-8664-3af823b9bec5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.407201 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jnwb\" (UniqueName: \"kubernetes.io/projected/09d9860a-bccf-4df5-8664-3af823b9bec5-kube-api-access-4jnwb\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.407285 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/09d9860a-bccf-4df5-8664-3af823b9bec5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: 
\"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.407319 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/09d9860a-bccf-4df5-8664-3af823b9bec5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.415475 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.421495 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.421557 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.421588 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:10 crc kubenswrapper[4922]: E0929 22:27:10.421750 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:10 crc kubenswrapper[4922]: E0929 22:27:10.421913 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:10 crc kubenswrapper[4922]: E0929 22:27:10.422014 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.436604 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.453980 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.462220 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 
crc kubenswrapper[4922]: I0929 22:27:10.462277 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.462292 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.462313 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.462329 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.474105 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.493629 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.508276 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/09d9860a-bccf-4df5-8664-3af823b9bec5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.508307 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/09d9860a-bccf-4df5-8664-3af823b9bec5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.508345 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/09d9860a-bccf-4df5-8664-3af823b9bec5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.508365 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jnwb\" (UniqueName: \"kubernetes.io/projected/09d9860a-bccf-4df5-8664-3af823b9bec5-kube-api-access-4jnwb\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.509104 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/09d9860a-bccf-4df5-8664-3af823b9bec5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.509321 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/09d9860a-bccf-4df5-8664-3af823b9bec5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.513331 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/09d9860a-bccf-4df5-8664-3af823b9bec5-ovn-control-plane-metrics-cert\") pod 
\"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.513941 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.523761 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jnwb\" (UniqueName: \"kubernetes.io/projected/09d9860a-bccf-4df5-8664-3af823b9bec5-kube-api-access-4jnwb\") pod \"ovnkube-control-plane-749d76644c-s5zhx\" (UID: \"09d9860a-bccf-4df5-8664-3af823b9bec5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.527862 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.543765 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.556580 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.565465 4922 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.565494 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.565503 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.565517 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.565527 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.581106 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4
b25e6f97ad3db01836b40c50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"message\\\":\\\"go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:08.086797 6262 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:08.086811 6262 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 22:27:08.086823 6262 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 22:27:08.086834 6262 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 22:27:08.086848 6262 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:08.086876 6262 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:08.086897 6262 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:08.086937 6262 factory.go:656] Stopping watch factory\\\\nI0929 22:27:08.086973 6262 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:08.086997 6262 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:08.087019 6262 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:08.087021 6262 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 22:27:08.087035 6262 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:08.087052 6262 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.596590 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.614727 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.652133 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.668354 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.668437 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.668456 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.668482 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.668500 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.710977 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/1.log" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.711983 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/0.log" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.716047 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50" exitCode=1 Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.716157 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.716251 4922 scope.go:117] "RemoveContainer" containerID="390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.717113 4922 scope.go:117] "RemoveContainer" containerID="b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50" Sep 29 22:27:10 crc kubenswrapper[4922]: E0929 22:27:10.717289 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.718097 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" event={"ID":"09d9860a-bccf-4df5-8664-3af823b9bec5","Type":"ContainerStarted","Data":"7044eb2b43e63264117198b00a557710c8ff8b7383a5e680af07b6c53058e751"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.732860 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.751069 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.771815 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.771888 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.771907 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.771934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.771956 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.775435 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.807163 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"message\\\":\\\"go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:08.086797 6262 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:08.086811 6262 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 22:27:08.086823 6262 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 22:27:08.086834 6262 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 22:27:08.086848 6262 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:08.086876 6262 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:08.086897 6262 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:08.086937 6262 factory.go:656] Stopping watch factory\\\\nI0929 22:27:08.086973 6262 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:08.086997 6262 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:08.087019 6262 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:08.087021 6262 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 22:27:08.087035 6262 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:08.087052 6262 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.822640 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.836235 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.851015 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.872179 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.877710 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.877754 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc 
kubenswrapper[4922]: I0929 22:27:10.877767 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.877784 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.877799 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.894688 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.918540 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.935036 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.948633 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.961599 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.971867 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.980494 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.980550 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.980567 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.980591 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.980610 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:10Z","lastTransitionTime":"2025-09-29T22:27:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:10 crc kubenswrapper[4922]: I0929 22:27:10.984334 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:10Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.083847 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.083913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.083933 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.083958 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.083977 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.186626 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.186699 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.186722 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.186750 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.186805 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.290111 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.290175 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.290200 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.290229 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.290250 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.393820 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.393884 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.393925 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.393949 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.393965 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.496707 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.496765 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.496781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.496806 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.496827 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.599658 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.599711 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.599728 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.599751 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.599769 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.702678 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.702741 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.702758 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.702783 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.702802 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.725754 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/1.log" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.734583 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" event={"ID":"09d9860a-bccf-4df5-8664-3af823b9bec5","Type":"ContainerStarted","Data":"1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.734645 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" event={"ID":"09d9860a-bccf-4df5-8664-3af823b9bec5","Type":"ContainerStarted","Data":"7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.753825 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.772629 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.794642 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.806929 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.806986 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc 
kubenswrapper[4922]: I0929 22:27:11.807004 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.807028 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.807045 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.813526 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:2
7:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.831345 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.854031 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.874283 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.891822 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.910748 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.910836 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.910884 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.910910 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.910927 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:11Z","lastTransitionTime":"2025-09-29T22:27:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.912270 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.934768 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.954201 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.973796 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:11 crc kubenswrapper[4922]: I0929 22:27:11.991418 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:11Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.013996 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.014072 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.014090 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.014115 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.014140 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.025508 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"message\\\":\\\"go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:08.086797 6262 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:08.086811 6262 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 22:27:08.086823 6262 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 22:27:08.086834 6262 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 22:27:08.086848 6262 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:08.086876 6262 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:08.086897 6262 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:08.086937 6262 factory.go:656] Stopping watch factory\\\\nI0929 22:27:08.086973 6262 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:08.086997 6262 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:08.087019 6262 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:08.087021 6262 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 22:27:08.087035 6262 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:08.087052 6262 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.043349 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.117968 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.118442 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.118462 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.118494 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.118513 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.125561 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.125736 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.125815 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:27:28.125780114 +0000 UTC m=+52.436068977 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.125868 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.125933 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.126017 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.126082 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:28.12604455 +0000 UTC m=+52.436333403 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.126129 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:28.126109902 +0000 UTC m=+52.436398825 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.221856 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.221909 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.221927 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.221950 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.221968 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.227141 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.227208 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227473 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227510 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227507 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227564 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227586 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227530 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227674 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:28.227648011 +0000 UTC m=+52.537936864 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.227744 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:28.227715432 +0000 UTC m=+52.538004295 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.230532 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-gkfvg"] Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.231514 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.231640 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.244763 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.261152 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.276444 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.293190 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.306693 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.325102 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.325141 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.325154 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.325171 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.325185 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.328454 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42tg6\" (UniqueName: \"kubernetes.io/projected/51c5d7b9-741c-448f-b19e-9441e62a48c6-kube-api-access-42tg6\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.328561 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.337045 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4
b25e6f97ad3db01836b40c50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"message\\\":\\\"go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:08.086797 6262 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:08.086811 6262 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 22:27:08.086823 6262 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 22:27:08.086834 6262 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 22:27:08.086848 6262 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:08.086876 6262 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:08.086897 6262 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:08.086937 6262 factory.go:656] Stopping watch factory\\\\nI0929 22:27:08.086973 6262 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:08.086997 6262 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:08.087019 6262 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:08.087021 6262 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 22:27:08.087035 6262 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:08.087052 6262 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 
8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5c
c94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.352716 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.368953 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.392721 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.410749 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.421501 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.421652 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.421515 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.421714 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.421838 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.422120 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.427146 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.427339 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.427383 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.427429 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.427457 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.427474 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.429414 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42tg6\" (UniqueName: \"kubernetes.io/projected/51c5d7b9-741c-448f-b19e-9441e62a48c6-kube-api-access-42tg6\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.429507 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.429643 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.429719 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:12.929698542 +0000 UTC m=+37.239987395 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.444962 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.458426 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42tg6\" (UniqueName: \"kubernetes.io/projected/51c5d7b9-741c-448f-b19e-9441e62a48c6-kube-api-access-42tg6\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.476123 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.491635 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.506903 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.525948 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.530306 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.530420 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.530437 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.530456 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.530468 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.633069 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.633124 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.633139 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.633161 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.633180 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.742859 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.742921 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.742938 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.742960 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.742978 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.807336 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.807422 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.807443 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.807466 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.807483 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.827908 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.832677 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.832725 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.832744 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.832765 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.832782 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.853461 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.859801 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.859900 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.859923 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.859947 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.859964 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.880276 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.884791 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.884853 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.884877 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.884907 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.884930 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.903647 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.908682 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.908726 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.908740 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.908759 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.908775 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.926632 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:12Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.926783 4922 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.928740 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.928772 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.928783 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.928798 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.928809 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:12Z","lastTransitionTime":"2025-09-29T22:27:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:12 crc kubenswrapper[4922]: I0929 22:27:12.933424 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.933543 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:12 crc kubenswrapper[4922]: E0929 22:27:12.933595 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:13.933579631 +0000 UTC m=+38.243868454 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.032889 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.032954 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.032974 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.033000 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.033028 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.146616 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.146696 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.146719 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.146750 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.146769 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.249707 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.249797 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.249816 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.249840 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.249858 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.353359 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.353488 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.353507 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.353535 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.353553 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.421596 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:13 crc kubenswrapper[4922]: E0929 22:27:13.421795 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.457009 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.457074 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.457093 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.457121 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.457139 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.559914 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.560033 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.560107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.560135 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.560152 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.663951 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.664005 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.664024 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.664047 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.664065 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.766915 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.766985 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.767006 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.767035 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.767055 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.870044 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.870101 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.870119 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.870142 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.870160 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.944157 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:13 crc kubenswrapper[4922]: E0929 22:27:13.944380 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:13 crc kubenswrapper[4922]: E0929 22:27:13.944500 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:15.944478447 +0000 UTC m=+40.254767290 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.974192 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.974265 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.974284 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.974308 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:13 crc kubenswrapper[4922]: I0929 22:27:13.974325 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:13Z","lastTransitionTime":"2025-09-29T22:27:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.077245 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.077303 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.077320 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.077346 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.077373 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.180698 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.180774 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.180797 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.180832 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.180857 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.284790 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.284863 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.284903 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.284937 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.284960 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.388240 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.388311 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.388329 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.388353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.388370 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.420945 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.421043 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.420945 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:14 crc kubenswrapper[4922]: E0929 22:27:14.421186 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:14 crc kubenswrapper[4922]: E0929 22:27:14.421290 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:14 crc kubenswrapper[4922]: E0929 22:27:14.421465 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.490815 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.490883 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.490899 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.490925 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.490943 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.593659 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.593760 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.595692 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.595734 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.595761 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.698754 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.698822 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.698846 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.698879 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.698902 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.802010 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.802089 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.802112 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.802139 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.802158 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.906470 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.906541 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.906563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.906594 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:14 crc kubenswrapper[4922]: I0929 22:27:14.906618 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:14Z","lastTransitionTime":"2025-09-29T22:27:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.010462 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.010536 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.010553 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.010580 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.010598 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.113967 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.114046 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.114068 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.114097 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.114119 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.216872 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.216985 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.217005 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.217029 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.217044 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.319752 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.319806 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.319822 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.319847 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.319864 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.420989 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:15 crc kubenswrapper[4922]: E0929 22:27:15.421104 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.423230 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.423270 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.423280 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.423296 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.423307 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.525838 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.525887 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.525909 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.525933 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.525951 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.629469 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.629548 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.629566 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.629637 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.629658 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.732781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.732832 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.732841 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.732861 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.732870 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.836270 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.836331 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.836346 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.836364 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.836377 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.938891 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.938974 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.938991 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.939021 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.939050 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:15Z","lastTransitionTime":"2025-09-29T22:27:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:15 crc kubenswrapper[4922]: I0929 22:27:15.969494 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:15 crc kubenswrapper[4922]: E0929 22:27:15.970327 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:15 crc kubenswrapper[4922]: E0929 22:27:15.970456 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:19.970432781 +0000 UTC m=+44.280721624 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.042658 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.042715 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.042733 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.042756 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.042775 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.145879 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.145934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.145950 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.145978 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.145996 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.249775 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.249834 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.249850 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.249876 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.249897 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.352952 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.353002 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.353020 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.353042 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.353059 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.421295 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:16 crc kubenswrapper[4922]: E0929 22:27:16.421497 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.421701 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.421851 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:16 crc kubenswrapper[4922]: E0929 22:27:16.422022 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:16 crc kubenswrapper[4922]: E0929 22:27:16.422253 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.445344 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.457042 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 
22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.457091 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.457114 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.457141 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.457162 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.466776 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.491307 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.510061 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.537512 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-reg
eneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.559290 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.559354 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.559373 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.559425 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.559446 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.560052 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.579187 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.597183 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.614948 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.629748 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.646704 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.662769 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.662853 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.662878 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.662944 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.662970 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.665144 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.694145 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://390cb50279597f8bcaa66287951dfb48ac8a68681f6bdd44cc073ccada88b74b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"message\\\":\\\"go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:08.086797 6262 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:08.086811 6262 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0929 22:27:08.086823 6262 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0929 22:27:08.086834 6262 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 22:27:08.086848 6262 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:08.086876 6262 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:08.086897 6262 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:08.086937 6262 factory.go:656] Stopping watch factory\\\\nI0929 22:27:08.086973 6262 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:08.086997 6262 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:08.087019 6262 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:08.087021 6262 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0929 22:27:08.087035 6262 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:08.087052 6262 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.709947 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.725032 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.740481 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:16Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.765525 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.765587 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.765604 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.765632 
4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.765657 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.868824 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.868896 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.868917 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.868943 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.868960 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.972314 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.972369 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.972441 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.972506 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:16 crc kubenswrapper[4922]: I0929 22:27:16.972529 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:16Z","lastTransitionTime":"2025-09-29T22:27:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.075493 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.075542 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.075554 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.075572 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.075583 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.178925 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.178986 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.179185 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.179203 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.179215 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.282294 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.282338 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.282354 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.282377 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.282429 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.385623 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.385687 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.385704 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.385726 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.385744 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.421092 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:17 crc kubenswrapper[4922]: E0929 22:27:17.421277 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.488902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.488982 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.489005 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.489036 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.489054 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.591767 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.591843 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.591915 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.591946 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.591972 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.694941 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.695016 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.695038 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.695069 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.695092 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.797601 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.797666 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.797688 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.797720 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.797743 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.900928 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.901001 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.901043 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.901077 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:17 crc kubenswrapper[4922]: I0929 22:27:17.901101 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:17Z","lastTransitionTime":"2025-09-29T22:27:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.003850 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.003924 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.003947 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.003977 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.003999 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.106824 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.106889 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.106906 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.106930 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.106947 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.210108 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.210161 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.210178 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.210202 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.210219 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.312678 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.312833 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.312853 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.312895 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.312915 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.415618 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.415694 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.415711 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.415736 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.415757 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.420940 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.421032 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.421102 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:18 crc kubenswrapper[4922]: E0929 22:27:18.421242 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:18 crc kubenswrapper[4922]: E0929 22:27:18.421376 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:18 crc kubenswrapper[4922]: E0929 22:27:18.421627 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.518615 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.518704 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.518725 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.518752 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.518770 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.622540 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.622637 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.622691 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.622720 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.622738 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.725318 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.725385 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.725429 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.725454 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.725471 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.828036 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.828092 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.828111 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.828138 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.828156 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.930823 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.930897 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.930920 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.930946 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:18 crc kubenswrapper[4922]: I0929 22:27:18.930967 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:18Z","lastTransitionTime":"2025-09-29T22:27:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.038556 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.038618 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.038636 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.038665 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.038686 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.141140 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.141274 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.141295 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.141319 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.141386 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.244242 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.244332 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.244348 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.244428 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.244472 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.347567 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.347662 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.347682 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.347739 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.347757 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.421560 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:19 crc kubenswrapper[4922]: E0929 22:27:19.421829 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.451542 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.451607 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.451630 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.451663 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.451685 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.554786 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.554848 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.554866 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.554891 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.554910 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.656904 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.656938 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.656951 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.656975 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.656993 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.760112 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.760172 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.760190 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.760215 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.760232 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.862320 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.862357 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.862367 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.862381 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.862404 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.965816 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.966001 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.966062 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.966094 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:19 crc kubenswrapper[4922]: I0929 22:27:19.966156 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:19Z","lastTransitionTime":"2025-09-29T22:27:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.015662 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:20 crc kubenswrapper[4922]: E0929 22:27:20.015857 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:20 crc kubenswrapper[4922]: E0929 22:27:20.015957 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:28.015923701 +0000 UTC m=+52.326212544 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.069694 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.069780 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.069797 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.070046 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.070066 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.173029 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.173214 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.173241 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.173313 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.173339 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.276562 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.276658 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.276682 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.276745 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.276765 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.380144 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.380516 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.380690 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.380836 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.380962 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.420807 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.420905 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:20 crc kubenswrapper[4922]: E0929 22:27:20.420961 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.421032 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:20 crc kubenswrapper[4922]: E0929 22:27:20.421192 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:20 crc kubenswrapper[4922]: E0929 22:27:20.421324 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.485095 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.485505 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.485672 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.485817 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.485938 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.589164 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.589226 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.589244 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.589270 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.589288 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.692786 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.692845 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.692862 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.692886 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.692906 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.796526 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.796626 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.796686 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.796711 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.796729 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.899453 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.899527 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.899563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.899594 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:20 crc kubenswrapper[4922]: I0929 22:27:20.899617 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:20Z","lastTransitionTime":"2025-09-29T22:27:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.002855 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.002909 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.002928 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.002951 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.002971 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.106274 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.106362 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.106434 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.106470 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.106496 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.209433 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.209486 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.209509 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.209541 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.209565 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.313051 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.313117 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.313131 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.313160 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.313173 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.416652 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.416727 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.416751 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.416782 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.416810 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.421055 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:21 crc kubenswrapper[4922]: E0929 22:27:21.421248 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.519981 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.520046 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.520062 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.520088 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.520108 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.623323 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.623375 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.623438 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.623468 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.623493 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.726132 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.726193 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.726211 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.726235 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.726256 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.829208 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.829272 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.829289 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.829316 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.829334 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.932933 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.932998 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.933013 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.933036 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:21 crc kubenswrapper[4922]: I0929 22:27:21.933050 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:21Z","lastTransitionTime":"2025-09-29T22:27:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.036107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.036160 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.036180 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.036207 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.036778 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.139229 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.139295 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.139318 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.139346 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.139372 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.243022 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.243072 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.243089 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.243113 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.243130 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.346203 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.346259 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.346281 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.346308 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.346330 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.421496 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.421701 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:22 crc kubenswrapper[4922]: E0929 22:27:22.421849 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:22 crc kubenswrapper[4922]: E0929 22:27:22.421702 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.421510 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:22 crc kubenswrapper[4922]: E0929 22:27:22.422022 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.449332 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.449463 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.449493 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.449524 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.449546 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.552852 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.552909 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.552925 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.552945 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.552958 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.656201 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.656260 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.656269 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.656289 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.656300 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.759714 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.759769 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.759786 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.759810 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.759826 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.862749 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.862876 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.862899 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.862927 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.862944 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.965546 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.965589 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.965607 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.965628 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:22 crc kubenswrapper[4922]: I0929 22:27:22.965645 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:22Z","lastTransitionTime":"2025-09-29T22:27:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.017800 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.017858 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.017877 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.017901 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.017917 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: E0929 22:27:23.035960 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.040197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.040272 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.040289 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.040312 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.040332 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: E0929 22:27:23.060271 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.066710 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.066781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.066795 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.066816 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.066857 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: E0929 22:27:23.086980 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.091692 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.091770 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.091794 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.092300 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.092586 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: E0929 22:27:23.110590 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.114676 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.114731 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.114752 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.114781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.114805 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: E0929 22:27:23.134196 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:23 crc kubenswrapper[4922]: E0929 22:27:23.134642 4922 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.137237 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.137487 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.137647 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.138066 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.138112 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.248232 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.248310 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.248329 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.248354 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.248371 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.351521 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.351583 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.351600 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.351626 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.351643 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.420906 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:23 crc kubenswrapper[4922]: E0929 22:27:23.421161 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.455355 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.455460 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.455491 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.455518 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.455535 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.557773 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.557797 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.557804 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.557818 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.557829 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.660923 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.661001 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.661024 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.661060 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.661103 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.763810 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.763903 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.763921 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.763944 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.763962 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.866569 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.866613 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.866646 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.866664 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.866675 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.929000 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.929650 4922 scope.go:117] "RemoveContainer" containerID="b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.949574 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.970445 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.970638 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.970691 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.970904 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.970950 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.970978 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:23Z","lastTransitionTime":"2025-09-29T22:27:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:23 crc kubenswrapper[4922]: I0929 22:27:23.987611 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:23Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.004124 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.020878 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.039704 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.057899 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.072849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.072920 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.072933 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.072968 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.072981 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.075054 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.107161 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.121320 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.137262 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.152051 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.168358 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.176649 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.176706 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.176724 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.176750 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.176774 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.184260 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.199791 4922 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.220810 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.279850 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.280182 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.280369 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.280581 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.280720 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.383588 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.383647 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.383665 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.383691 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.383709 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.421188 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.421209 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.422740 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:24 crc kubenswrapper[4922]: E0929 22:27:24.422886 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:24 crc kubenswrapper[4922]: E0929 22:27:24.423434 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:24 crc kubenswrapper[4922]: E0929 22:27:24.423521 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.486787 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.486870 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.486894 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.486924 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.486951 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.590472 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.590526 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.590543 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.590564 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.590581 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.692333 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.692369 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.692377 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.692404 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.692415 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.787156 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/1.log" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.790518 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.791154 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.794229 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.794282 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.794299 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.794325 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.794342 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.806460 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.826038 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.845135 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.870135 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.887030 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.896277 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.896344 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.896355 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.896383 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.896426 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.906755 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.922273 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.934307 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.947756 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.969096 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.981948 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.999501 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.999554 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.999573 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.999601 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:24 crc kubenswrapper[4922]: I0929 22:27:24.999622 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:24Z","lastTransitionTime":"2025-09-29T22:27:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.001722 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:24Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.017071 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.047555 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb
972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.065534 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.081511 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.103122 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.103166 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.103185 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.103208 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.103228 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.206543 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.206590 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.206607 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.206631 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.206649 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.309594 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.309651 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.309672 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.309695 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.309712 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.412488 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.412782 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.412951 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.413082 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.413202 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.421098 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:25 crc kubenswrapper[4922]: E0929 22:27:25.421524 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.516281 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.516651 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.516805 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.516949 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.517073 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.620727 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.620786 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.620805 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.620830 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.620846 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.724288 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.724353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.724371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.724426 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.724443 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.796206 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/2.log" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.797163 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/1.log" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.802749 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec" exitCode=1 Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.802891 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.802986 4922 scope.go:117] "RemoveContainer" containerID="b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.805227 4922 scope.go:117] "RemoveContainer" containerID="86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec" Sep 29 22:27:25 crc kubenswrapper[4922]: E0929 22:27:25.805631 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.827491 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.829531 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.829587 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.829605 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.829627 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.829644 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.847518 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.871540 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.889908 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.911903 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.933142 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.933778 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.933835 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.933853 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.933878 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.933896 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:25Z","lastTransitionTime":"2025-09-29T22:27:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.954700 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.975906 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:25 crc kubenswrapper[4922]: I0929 22:27:25.996771 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:25Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.019736 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.039504 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.039555 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.039574 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.039601 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.039618 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.040258 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.060088 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.091223 4922 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert 
Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a063
19ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.109448 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.127235 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.142712 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.142781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.142805 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.142835 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.142859 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.144308 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.246565 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.246723 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.246745 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.246772 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.246803 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.350088 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.350133 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.350150 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.350174 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.350192 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.422108 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.422108 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.422270 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:26 crc kubenswrapper[4922]: E0929 22:27:26.422478 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:26 crc kubenswrapper[4922]: E0929 22:27:26.422682 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:26 crc kubenswrapper[4922]: E0929 22:27:26.422832 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.447056 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.453353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.453436 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.453462 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.453490 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.453517 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.466335 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.492793 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.510790 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.530980 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.556683 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.556741 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.556758 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.556807 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.556826 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.565228 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.583803 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.603761 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.624015 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.641466 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.654911 4922 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.660950 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.661197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.661460 4922 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.661659 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.661853 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.674370 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a2390256902
2f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.692764 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.710584 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.743673 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb
972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b59d0fc94ccbc3353024bb39c0fa0d92003eb2e4b25e6f97ad3db01836b40c50\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"message\\\":\\\" 6404 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0929 22:27:09.673837 6404 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0929 22:27:09.673867 6404 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 22:27:09.673874 6404 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0929 22:27:09.673916 6404 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 22:27:09.673924 6404 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0929 22:27:09.673946 6404 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0929 22:27:09.673960 6404 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 22:27:09.673977 6404 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 22:27:09.673974 6404 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 22:27:09.673982 6404 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 22:27:09.674000 6404 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0929 22:27:09.674004 6404 factory.go:656] Stopping watch factory\\\\nI0929 22:27:09.674012 6404 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0929 22:27:09.674006 6404 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 22:27:09.674040 6404 handler.go:208] Removed *v1.Namespace ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller 
event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.759562 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.765245 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.765300 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.765317 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.765341 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.765359 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.810168 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/2.log" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.816520 4922 scope.go:117] "RemoveContainer" containerID="86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec" Sep 29 22:27:26 crc kubenswrapper[4922]: E0929 22:27:26.817230 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.836245 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.853937 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.868439 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.868485 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.868503 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.868526 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.868543 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.884468 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.899650 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.916247 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.931706 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.948243 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.971341 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.972202 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.972276 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.972299 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.972326 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.972347 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:26Z","lastTransitionTime":"2025-09-29T22:27:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:26 crc kubenswrapper[4922]: I0929 22:27:26.995783 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:26Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.016797 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:27Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.036910 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:27Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.054773 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:27Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.072617 4922 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:27Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.075371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.075412 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.075421 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.075434 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.075471 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.087700 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:27Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.108379 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:27Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.131044 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:27Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.178781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.178851 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.178873 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.178905 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.178928 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.282375 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.282483 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.282503 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.282530 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.282547 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.385744 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.385883 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.385902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.385930 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.385949 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.421330 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:27 crc kubenswrapper[4922]: E0929 22:27:27.421559 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.489289 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.489352 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.489373 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.489425 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.489445 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.592822 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.592882 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.592902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.592929 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.592950 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.696506 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.696571 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.696592 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.696618 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.696639 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.799921 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.799979 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.799995 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.800016 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.800032 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.903643 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.903696 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.903708 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.903727 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:27 crc kubenswrapper[4922]: I0929 22:27:27.903739 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:27Z","lastTransitionTime":"2025-09-29T22:27:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.006929 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.007002 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.007024 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.007062 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.007085 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.109918 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.110139 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.110237 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:27:44.110219622 +0000 UTC m=+68.420508445 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.112001 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.112039 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.112051 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.112072 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.112086 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.211168 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.211420 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:00.211361831 +0000 UTC m=+84.521650664 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.211798 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.211887 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.212040 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.212044 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.212127 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:28:00.21210246 +0000 UTC m=+84.522391313 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.212153 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:28:00.212141381 +0000 UTC m=+84.522430224 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.217133 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.217207 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.217231 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.217259 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.217281 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.313074 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.313144 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313346 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313366 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313460 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313485 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313385 4922 
projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313546 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313553 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:28:00.313531266 +0000 UTC m=+84.623820109 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.313603 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:28:00.313585987 +0000 UTC m=+84.623874790 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.320525 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.320575 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.320591 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.320616 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.320633 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.421273 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.421358 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.421431 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.421596 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.421700 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:28 crc kubenswrapper[4922]: E0929 22:27:28.421902 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.423234 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.423269 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.423282 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.423299 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.423313 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.527135 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.527193 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.527212 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.527238 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.527256 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.630387 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.630466 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.630484 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.630507 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.630525 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.733720 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.733790 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.733807 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.733834 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.733851 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.836730 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.836781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.836799 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.836822 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.836839 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.940231 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.940292 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.940317 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.940351 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:28 crc kubenswrapper[4922]: I0929 22:27:28.940374 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:28Z","lastTransitionTime":"2025-09-29T22:27:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.043496 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.043569 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.043586 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.043613 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.043633 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.146832 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.146894 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.146911 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.146934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.146952 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.249565 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.249664 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.249684 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.249709 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.249726 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.353048 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.353100 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.353113 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.353133 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.353147 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.421331 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:29 crc kubenswrapper[4922]: E0929 22:27:29.421521 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.455950 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.456022 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.456045 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.456075 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.456100 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.558440 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.558563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.558587 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.558612 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.558630 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.661444 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.661481 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.661509 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.661539 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.661551 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.764490 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.764557 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.764574 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.764600 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.764617 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.866692 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.866729 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.866738 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.866752 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.866761 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.969969 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.970023 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.970045 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.970069 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:29 crc kubenswrapper[4922]: I0929 22:27:29.970086 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:29Z","lastTransitionTime":"2025-09-29T22:27:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.073187 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.073279 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.073310 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.073340 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.073365 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.176960 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.177062 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.177087 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.177130 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.177157 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.281045 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.281111 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.281134 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.281164 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.281188 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.387199 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.387353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.387378 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.387447 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.387478 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.421062 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:30 crc kubenswrapper[4922]: E0929 22:27:30.421253 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.421362 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:30 crc kubenswrapper[4922]: E0929 22:27:30.421566 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.421820 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:30 crc kubenswrapper[4922]: E0929 22:27:30.422094 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.490626 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.490903 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.491049 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.491205 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.491353 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.595380 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.595472 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.595491 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.595517 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.595537 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.698910 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.698962 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.698978 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.699003 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.699021 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.802818 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.802885 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.802902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.802930 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.802950 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.906535 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.906603 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.906621 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.906652 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:30 crc kubenswrapper[4922]: I0929 22:27:30.906672 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:30Z","lastTransitionTime":"2025-09-29T22:27:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.010224 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.010306 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.010324 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.010353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.010384 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.115288 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.115433 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.115455 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.115490 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.115513 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.218805 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.218973 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.219000 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.219037 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.219061 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.322137 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.322216 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.322237 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.322273 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.322296 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.421822 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:31 crc kubenswrapper[4922]: E0929 22:27:31.422045 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.426296 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.426354 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.426377 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.426442 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.426473 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.529759 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.529822 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.529840 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.529866 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.529883 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.633666 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.633964 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.634102 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.634254 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.634419 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.737821 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.738086 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.738231 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.738433 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.738600 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.842133 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.842204 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.842223 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.842250 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.842269 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.945249 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.945305 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.945323 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.945355 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.945375 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:31Z","lastTransitionTime":"2025-09-29T22:27:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:31 crc kubenswrapper[4922]: I0929 22:27:31.994120 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.008994 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.020243 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.041482 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.047890 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.047973 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.047995 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.048025 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.048056 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.058128 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.076480 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.088900 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.102918 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.121242 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.140861 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.151351 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.151457 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.151475 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.151533 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.151553 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.162868 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.181436 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.199362 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.214815 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.243522 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb
972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.255294 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.255349 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.255368 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.255422 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.255440 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.260554 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.276216 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.290902 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:32Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.358523 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.358600 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.358621 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.358648 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.358665 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.421766 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.421880 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:32 crc kubenswrapper[4922]: E0929 22:27:32.422009 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.422243 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:32 crc kubenswrapper[4922]: E0929 22:27:32.422353 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:32 crc kubenswrapper[4922]: E0929 22:27:32.422561 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.462020 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.462078 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.462095 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.462120 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.462137 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.565923 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.566009 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.566027 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.566050 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.566067 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.668810 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.668881 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.668905 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.668934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.668954 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.771723 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.771804 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.771828 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.771857 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.771879 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.875542 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.875780 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.875792 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.875812 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.875824 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.978852 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.978929 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.978952 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.978983 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:32 crc kubenswrapper[4922]: I0929 22:27:32.979006 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:32Z","lastTransitionTime":"2025-09-29T22:27:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.081883 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.081937 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.081954 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.081978 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.081995 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.185521 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.185577 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.185593 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.185618 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.185634 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.289188 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.289241 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.289257 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.289280 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.289298 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.392520 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.392580 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.392597 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.392621 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.392640 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.393991 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.394041 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.394058 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.394082 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.394098 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: E0929 22:27:33.414475 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:33Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.419863 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.419914 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.419947 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.419968 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.419980 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.421194 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:33 crc kubenswrapper[4922]: E0929 22:27:33.421443 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:33 crc kubenswrapper[4922]: E0929 22:27:33.443011 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed2
1\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:33Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.447884 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.447923 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.447939 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.447961 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.447978 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: E0929 22:27:33.467565 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[image list, nodeInfo, and runtimeHandlers omitted; byte-identical to the previous retry above]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:33Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.472583 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.472640 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.472658 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.472682 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.472699 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: E0929 22:27:33.492130 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[image list, nodeInfo, and runtimeHandlers omitted; byte-identical to the previous retries above]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:33Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.497934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.498177 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.498222 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.498255 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.498278 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: E0929 22:27:33.518843 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[image list, nodeInfo, and runtimeHandlers omitted; byte-identical to the previous retries above]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:33Z is after 
2025-08-24T17:21:41Z" Sep 29 22:27:33 crc kubenswrapper[4922]: E0929 22:27:33.519060 4922 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.522161 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.522211 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.522228 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.522254 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.522275 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.626099 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.626159 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.626175 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.626200 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.626217 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.729571 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.729645 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.729668 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.729698 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.729722 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.832780 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.832826 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.832846 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.832871 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.832889 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.935665 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.935725 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.935744 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.935769 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:33 crc kubenswrapper[4922]: I0929 22:27:33.935800 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:33Z","lastTransitionTime":"2025-09-29T22:27:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.039127 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.039178 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.039197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.039221 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.039237 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.141893 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.141964 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.141988 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.142026 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.142048 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.245642 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.245719 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.245739 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.245772 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.245796 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.349330 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.349412 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.349443 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.349467 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.349483 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.421022 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.421153 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:34 crc kubenswrapper[4922]: E0929 22:27:34.421227 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.421259 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:34 crc kubenswrapper[4922]: E0929 22:27:34.421432 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:34 crc kubenswrapper[4922]: E0929 22:27:34.421626 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.452639 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.452691 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.452708 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.452733 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.452750 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.556199 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.556264 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.556283 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.556309 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.556325 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.659298 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.659364 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.659384 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.659442 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.659463 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.761714 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.761806 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.761829 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.761859 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.761884 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.864972 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.865031 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.865048 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.865075 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.865092 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.967568 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.967631 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.967657 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.967682 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:34 crc kubenswrapper[4922]: I0929 22:27:34.967699 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:34Z","lastTransitionTime":"2025-09-29T22:27:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.071305 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.071358 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.071375 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.071429 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.071448 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.175988 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.176495 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.176706 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.176893 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.177125 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.280579 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.280630 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.280650 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.280674 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.280694 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.384303 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.384368 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.384416 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.384443 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.384461 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.421559 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:35 crc kubenswrapper[4922]: E0929 22:27:35.421745 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.487810 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.487864 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.487881 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.487903 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.487920 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.591021 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.591066 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.591088 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.591116 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.591136 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.694615 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.694669 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.694685 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.694709 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.694726 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.798259 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.798307 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.798324 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.798344 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.798361 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.902255 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.902328 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.902347 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.902375 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:35 crc kubenswrapper[4922]: I0929 22:27:35.902420 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:35Z","lastTransitionTime":"2025-09-29T22:27:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.005318 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.005534 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.005555 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.005581 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.005598 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.108918 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.108979 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.108996 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.109020 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.109039 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.211554 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.211618 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.211636 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.211662 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.211679 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.314515 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.314575 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.314592 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.314615 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.314632 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.417777 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.417826 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.417842 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.417867 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.417884 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.421800 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.421811 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:36 crc kubenswrapper[4922]: E0929 22:27:36.421947 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.422041 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:36 crc kubenswrapper[4922]: E0929 22:27:36.422135 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:36 crc kubenswrapper[4922]: E0929 22:27:36.422201 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.444615 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}
,\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.467799 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.485104 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.518479 4922 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.521298 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.521355 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.521374 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.521428 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.521448 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.536055 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.553799 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.572458 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.592763 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.612227 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.626770 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.626831 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.626848 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.626878 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.626896 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.636526 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.654137 4922 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.675967 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.694036 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.711710 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.730741 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.730824 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.730841 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.730867 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.730919 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.733088 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.754452 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.774066 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni
/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:36Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.833460 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.833823 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.834020 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.834222 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.834439 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.937363 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.937410 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.937422 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.937437 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:36 crc kubenswrapper[4922]: I0929 22:27:36.937446 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:36Z","lastTransitionTime":"2025-09-29T22:27:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.040647 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.040993 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.041194 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.041381 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.041630 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.144970 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.146310 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.146567 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.146838 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.147048 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.250301 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.250674 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.250692 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.250714 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.250734 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.353740 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.353787 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.353805 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.353827 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.353844 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.421735 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:37 crc kubenswrapper[4922]: E0929 22:27:37.421918 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.457330 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.457436 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.457460 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.457491 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.457512 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.561318 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.561435 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.561461 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.561493 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.561515 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.664528 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.664599 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.664622 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.664650 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.664671 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.768175 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.768236 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.768295 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.768326 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.768347 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.871671 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.871759 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.871782 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.872325 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.872634 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.975905 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.975942 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.975955 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.975970 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:37 crc kubenswrapper[4922]: I0929 22:27:37.975981 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:37Z","lastTransitionTime":"2025-09-29T22:27:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.079523 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.079668 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.079693 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.079725 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.079748 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.183340 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.183450 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.183473 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.183499 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.183517 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.286701 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.286765 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.286789 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.286818 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.286838 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.389624 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.389702 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.389731 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.389763 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.389785 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.420905 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.420924 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:38 crc kubenswrapper[4922]: E0929 22:27:38.421117 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.421191 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:38 crc kubenswrapper[4922]: E0929 22:27:38.421336 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:38 crc kubenswrapper[4922]: E0929 22:27:38.421557 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.492759 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.492825 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.492845 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.492873 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.492892 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.596089 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.596147 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.596169 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.596229 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.596254 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.699190 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.699257 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.699282 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.699312 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.699332 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.802600 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.802674 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.802702 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.802733 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.802758 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.906018 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.906078 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.906095 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.906119 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:38 crc kubenswrapper[4922]: I0929 22:27:38.906136 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:38Z","lastTransitionTime":"2025-09-29T22:27:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.007958 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.008020 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.008037 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.008060 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.008079 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.111676 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.111739 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.111756 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.111781 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.111797 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.214454 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.214506 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.214525 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.214547 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.214563 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.317165 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.317562 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.317736 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.317891 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.318033 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.420816 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.420849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.421348 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.421370 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.421422 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.421440 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: E0929 22:27:39.421323 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.524342 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.524440 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.524465 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.524493 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.524510 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.627011 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.627089 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.627108 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.627132 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.627149 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.730485 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.730548 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.730569 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.730599 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.730620 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.832750 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.832785 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.832801 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.832826 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.832843 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.935583 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.935981 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.936216 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.936441 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:39 crc kubenswrapper[4922]: I0929 22:27:39.936610 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:39Z","lastTransitionTime":"2025-09-29T22:27:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.039785 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.039832 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.039849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.039874 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.039892 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.142774 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.142815 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.142830 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.142851 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.142868 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.246266 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.246301 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.246312 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.246327 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.246337 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.348056 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.348075 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.348084 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.348093 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.348101 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.421179 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.421220 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.421179 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:40 crc kubenswrapper[4922]: E0929 22:27:40.421306 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:40 crc kubenswrapper[4922]: E0929 22:27:40.421419 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:40 crc kubenswrapper[4922]: E0929 22:27:40.421481 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.450075 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.450100 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.450107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.450119 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.450127 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.553199 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.553247 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.553260 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.553279 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.553291 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.656655 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.656705 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.656723 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.656746 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.656764 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.759914 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.759975 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.759997 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.760028 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.760051 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.862607 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.862667 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.862684 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.862710 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.862728 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.965118 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.965157 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.965170 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.965186 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:40 crc kubenswrapper[4922]: I0929 22:27:40.965199 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:40Z","lastTransitionTime":"2025-09-29T22:27:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.067177 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.067224 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.067237 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.067254 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.067265 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.170284 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.170330 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.170341 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.170361 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.170373 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.273100 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.273152 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.273169 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.273193 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.273209 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.375277 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.375309 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.375318 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.375330 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.375339 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.420927 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:41 crc kubenswrapper[4922]: E0929 22:27:41.421083 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.478925 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.478984 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.479005 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.479031 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.479049 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.582432 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.582489 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.582498 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.582513 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.582521 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.685608 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.685685 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.685699 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.685714 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.685744 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.788460 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.788500 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.788508 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.788522 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.788532 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.890999 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.891058 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.891077 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.891107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.891129 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.993795 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.994165 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.994353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.994589 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:41 crc kubenswrapper[4922]: I0929 22:27:41.994987 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:41Z","lastTransitionTime":"2025-09-29T22:27:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.098223 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.098261 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.098272 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.098287 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.098297 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.201308 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.201638 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.201773 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.201913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.202041 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.305505 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.305575 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.305594 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.305619 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.305637 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.407838 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.407885 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.407895 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.407911 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.407921 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.421437 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.421621 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.421551 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:42 crc kubenswrapper[4922]: E0929 22:27:42.421781 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:42 crc kubenswrapper[4922]: E0929 22:27:42.422377 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:42 crc kubenswrapper[4922]: E0929 22:27:42.422587 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.423061 4922 scope.go:117] "RemoveContainer" containerID="86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec" Sep 29 22:27:42 crc kubenswrapper[4922]: E0929 22:27:42.423444 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.511297 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.511332 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.511347 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.511364 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.511376 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.613366 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.613467 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.613490 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.613520 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.613545 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.715783 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.715837 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.715855 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.715879 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.715896 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.818304 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.818361 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.818380 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.818429 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.818447 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.921674 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.921728 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.921747 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.921770 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:42 crc kubenswrapper[4922]: I0929 22:27:42.921788 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:42Z","lastTransitionTime":"2025-09-29T22:27:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.024234 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.024301 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.024325 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.024356 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.024375 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.126376 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.126470 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.126493 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.126521 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.126538 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.228609 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.228662 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.228680 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.228703 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.228720 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.330604 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.330664 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.330672 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.330704 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.330715 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.421133 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:43 crc kubenswrapper[4922]: E0929 22:27:43.421325 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.432880 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.432931 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.432948 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.432969 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.432987 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.535233 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.535292 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.535313 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.535337 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.535356 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.637802 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.637868 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.637895 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.637921 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.637939 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.740722 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.740775 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.740787 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.740807 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.740820 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.799190 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.799229 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.799238 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.799254 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.799263 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: E0929 22:27:43.812510 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:43Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.815752 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.815931 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.816053 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.816184 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.816329 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: E0929 22:27:43.829901 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:43Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.834122 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.834241 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.834305 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.834374 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.834458 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: E0929 22:27:43.845550 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:43Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.849084 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.849143 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.849160 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.849185 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.849201 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: E0929 22:27:43.862454 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:43Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.865522 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.865568 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.865585 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.865605 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.865622 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: E0929 22:27:43.881302 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:43Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:43 crc kubenswrapper[4922]: E0929 22:27:43.881477 4922 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.883139 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.883183 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.883200 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.883221 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.883238 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.985282 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.985309 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.985334 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.985346 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:43 crc kubenswrapper[4922]: I0929 22:27:43.985353 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:43Z","lastTransitionTime":"2025-09-29T22:27:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.087042 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.087084 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.087092 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.087107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.087116 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.189472 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.189496 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.189504 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.189515 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.189544 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.201712 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:44 crc kubenswrapper[4922]: E0929 22:27:44.201857 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:44 crc kubenswrapper[4922]: E0929 22:27:44.201907 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:28:16.201894117 +0000 UTC m=+100.512182930 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.291710 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.291813 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.291839 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.291870 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.291892 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.394092 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.394173 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.394197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.394228 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.394253 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.421819 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.421861 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.421910 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:44 crc kubenswrapper[4922]: E0929 22:27:44.422217 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:44 crc kubenswrapper[4922]: E0929 22:27:44.422422 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:44 crc kubenswrapper[4922]: E0929 22:27:44.422373 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.496542 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.496598 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.496614 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.496636 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.496653 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.598500 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.598554 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.598571 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.598608 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.598625 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.701226 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.701263 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.701273 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.701288 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.701300 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.804170 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.804227 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.804243 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.804267 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.804286 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.906166 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.906223 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.906244 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.906267 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:44 crc kubenswrapper[4922]: I0929 22:27:44.906286 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:44Z","lastTransitionTime":"2025-09-29T22:27:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.009035 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.009073 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.009084 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.009098 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.009110 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.111079 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.111107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.111116 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.111130 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.111141 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.213158 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.213185 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.213192 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.213205 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.213214 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.315531 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.315581 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.315596 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.315618 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.315633 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.418648 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.418740 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.418765 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.418795 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.418818 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.420882 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:45 crc kubenswrapper[4922]: E0929 22:27:45.420996 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.521957 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.522049 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.522126 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.522148 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.522167 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.624109 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.624146 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.624156 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.624170 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.624181 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.726719 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.726761 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.726777 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.726799 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.726815 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.829350 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.829412 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.829429 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.829448 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.829463 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.880602 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/0.log" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.880666 4922 generic.go:334] "Generic (PLEG): container finished" podID="6edd2cff-7363-4e99-8cc3-3db297410bce" containerID="2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799" exitCode=1 Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.880701 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerDied","Data":"2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.881142 4922 scope.go:117] "RemoveContainer" containerID="2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.896464 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file 
check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:45Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.911952 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:45Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.944071 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:45Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.948007 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.948063 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.948080 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.948104 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.948121 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:45Z","lastTransitionTime":"2025-09-29T22:27:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.971264 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:45Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:45 crc kubenswrapper[4922]: I0929 22:27:45.988611 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:45Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.003791 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.014077 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.024939 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.036722 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\
\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.047622 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.050486 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.050535 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.050551 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.050571 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.050589 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.060595 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.081206 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.089912 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.100312 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.111475 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.125047 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.134502 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.153339 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.153420 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.153431 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.153443 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.153451 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.256137 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.256163 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.256172 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.256183 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.256190 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.357892 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.357933 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.357950 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.357970 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.357987 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.420965 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:46 crc kubenswrapper[4922]: E0929 22:27:46.421049 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.421091 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.421101 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:46 crc kubenswrapper[4922]: E0929 22:27:46.421222 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:46 crc kubenswrapper[4922]: E0929 22:27:46.421377 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.435080 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.451710 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.461146 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.461188 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.461200 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.461219 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.461232 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.467426 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.478852 4922 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.495479 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.510179 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.529271 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.545418 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.559513 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.569796 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.569842 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.569860 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.569883 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.569903 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.576731 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.601429 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb
972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.612874 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.623875 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.638544 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.651254 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.664120 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.672291 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.672360 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.672379 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.672432 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.672451 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.674247 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.774512 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.774533 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.774542 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.774554 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.774563 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.876481 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.876507 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.876516 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.876529 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.876539 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.885505 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/0.log" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.885590 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerStarted","Data":"940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.908030 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.925114 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.940049 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.957482 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb
972e4480a8dd78f705c50eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.967243 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.978068 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.981534 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.981575 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.981592 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.981615 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.981631 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:46Z","lastTransitionTime":"2025-09-29T22:27:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:46 crc kubenswrapper[4922]: I0929 22:27:46.992875 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:46Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.006346 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.025047 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.048772 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.062198 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.078322 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.083975 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.084045 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.084070 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.084099 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.084124 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.090161 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.108706 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.124522 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.144193 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.163565 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:47Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.186219 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.186250 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.186260 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.186277 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.186288 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.289362 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.289438 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.289455 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.289477 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.289496 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.391971 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.391997 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.392006 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.392018 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.392026 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.421242 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:47 crc kubenswrapper[4922]: E0929 22:27:47.421336 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.494879 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.494954 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.494976 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.495006 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.495028 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.597726 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.597794 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.597812 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.597839 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.597859 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.700825 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.700903 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.700926 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.700957 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.700981 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.803551 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.803612 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.803635 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.803660 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.803682 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.905591 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.905626 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.905635 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.905650 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:47 crc kubenswrapper[4922]: I0929 22:27:47.905660 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:47Z","lastTransitionTime":"2025-09-29T22:27:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.008383 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.008434 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.008444 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.008458 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.008468 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.111442 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.111514 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.111533 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.111563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.111581 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.214743 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.214856 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.214883 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.214913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.214935 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.318293 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.318348 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.318373 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.318431 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.318457 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421130 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:48 crc kubenswrapper[4922]: E0929 22:27:48.421274 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421289 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421332 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:48 crc kubenswrapper[4922]: E0929 22:27:48.421445 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:48 crc kubenswrapper[4922]: E0929 22:27:48.421569 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421720 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421808 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421828 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421856 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.421876 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.526053 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.526710 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.527136 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.527305 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.527469 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.634443 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.634508 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.634530 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.634559 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.634582 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.737117 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.737183 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.737201 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.737227 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.737244 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.840749 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.840852 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.840907 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.840935 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.840954 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.943943 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.943991 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.944001 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.944020 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:48 crc kubenswrapper[4922]: I0929 22:27:48.944032 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:48Z","lastTransitionTime":"2025-09-29T22:27:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.047549 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.047618 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.047636 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.047663 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.047682 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.150328 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.150374 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.150417 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.150441 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.150460 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.253965 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.254009 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.254025 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.254047 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.254066 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.357312 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.357737 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.357900 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.358043 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.358185 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.421114 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:49 crc kubenswrapper[4922]: E0929 22:27:49.421513 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.461033 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.461342 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.461519 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.461694 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.461843 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.565454 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.565516 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.565536 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.565564 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.565583 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.667477 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.667843 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.668000 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.668153 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.668294 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.771264 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.771555 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.771681 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.771806 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.771918 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.875037 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.875078 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.875090 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.875106 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.875119 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.977313 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.977487 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.977569 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.977641 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:49 crc kubenswrapper[4922]: I0929 22:27:49.977720 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:49Z","lastTransitionTime":"2025-09-29T22:27:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.079823 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.080057 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.080115 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.080189 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.080252 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.183054 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.183114 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.183134 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.183162 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.183182 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.285660 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.285719 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.285737 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.285764 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.285783 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.388285 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.388333 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.388350 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.388375 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.388424 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.421865 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.421986 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.422818 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:50 crc kubenswrapper[4922]: E0929 22:27:50.423097 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:50 crc kubenswrapper[4922]: E0929 22:27:50.423344 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:50 crc kubenswrapper[4922]: E0929 22:27:50.423589 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.492045 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.492099 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.492116 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.492144 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.492167 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.594766 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.594828 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.594850 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.594881 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.594903 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.698029 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.698092 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.698114 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.698145 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.698164 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.800959 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.801307 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.801592 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.801769 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.801896 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.904302 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.904660 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.904733 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.904772 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:50 crc kubenswrapper[4922]: I0929 22:27:50.904803 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:50Z","lastTransitionTime":"2025-09-29T22:27:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.008257 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.008662 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.008803 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.009035 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.009173 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.112553 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.112616 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.112633 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.112657 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.112676 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.215720 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.216050 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.216233 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.216426 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.216590 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.319849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.319920 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.319939 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.319968 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.319990 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.420803 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:51 crc kubenswrapper[4922]: E0929 22:27:51.421025 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.422921 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.423620 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.423666 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.423706 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.423724 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.526619 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.527024 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.527160 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.527302 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.527469 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.632071 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.632133 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.632157 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.632184 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.632202 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.735185 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.735245 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.735266 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.735291 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.735309 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.838101 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.838173 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.838190 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.838215 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.838238 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.941185 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.941267 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.941293 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.941325 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:51 crc kubenswrapper[4922]: I0929 22:27:51.941349 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:51Z","lastTransitionTime":"2025-09-29T22:27:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.044553 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.044646 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.044666 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.044690 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.044740 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.148021 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.148094 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.148112 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.148141 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.148166 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.251257 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.251297 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.251305 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.251319 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.251329 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.355035 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.355100 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.355118 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.355144 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.355161 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.421442 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.421480 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:52 crc kubenswrapper[4922]: E0929 22:27:52.421615 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.421693 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:52 crc kubenswrapper[4922]: E0929 22:27:52.421873 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:52 crc kubenswrapper[4922]: E0929 22:27:52.422047 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.458777 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.458834 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.458853 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.458881 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.458899 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.560867 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.560929 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.560949 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.560975 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.560992 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.664455 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.664510 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.664528 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.664562 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.664581 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.767369 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.767467 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.767485 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.767517 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.767535 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.869722 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.869780 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.869799 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.869824 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.869839 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.973098 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.973164 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.973186 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.973213 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:52 crc kubenswrapper[4922]: I0929 22:27:52.973232 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:52Z","lastTransitionTime":"2025-09-29T22:27:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.077606 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.077679 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.077697 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.077728 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.077750 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.181120 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.181186 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.181204 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.181230 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.181248 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.283899 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.283930 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.283942 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.283957 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.283968 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.387196 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.387282 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.387311 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.387346 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.387371 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.421916 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:53 crc kubenswrapper[4922]: E0929 22:27:53.422525 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.423040 4922 scope.go:117] "RemoveContainer" containerID="86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.491112 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.491159 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.491175 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.491201 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.491217 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.594114 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.594183 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.594207 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.594239 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.594261 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.696872 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.696907 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.696917 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.696932 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.696943 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.807913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.807973 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.807992 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.808016 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.808034 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.911452 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.911486 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.911498 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.911518 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.911530 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:53Z","lastTransitionTime":"2025-09-29T22:27:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.915617 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/2.log" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.920109 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.920723 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.948875 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:53Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.964067 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:53Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.979678 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:53Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:53 crc kubenswrapper[4922]: I0929 22:27:53.996073 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:53Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.010099 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.014858 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.014920 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.014943 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.014972 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.014994 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.021751 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.035153 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.052370 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.070749 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.090040 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.111732 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.111790 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.111808 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.111834 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.111855 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.116814 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.126847 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.131928 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.131988 4922 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.132011 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.132039 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.132057 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.143462 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.148869 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.158675 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.198558 4922 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.198627 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.198640 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.198658 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.198672 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.205096 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a
10db8f030040e487a8f94ae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.214631 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.218768 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.218833 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.218852 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.218877 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.218895 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.224026 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.236817 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.240264 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.241365 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.241427 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.241440 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.241461 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.241475 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.252974 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.255505 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.255618 4922 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.256866 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.256934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.256944 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.256956 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.256965 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.359228 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.359254 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.359263 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.359274 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.359282 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.421229 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.421254 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.421596 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.421783 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.422104 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.422320 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.462198 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.462268 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.462285 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.462311 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.462332 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.565665 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.565734 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.565800 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.565830 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.565846 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.670325 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.670468 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.670496 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.670530 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.670556 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.773980 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.774047 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.774068 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.774097 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.774116 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.876540 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.876601 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.876625 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.876658 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.876677 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.925679 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/3.log" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.926706 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/2.log" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.930852 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" exitCode=1 Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.930916 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.930984 4922 scope.go:117] "RemoveContainer" containerID="86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.931938 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:27:54 crc kubenswrapper[4922]: E0929 22:27:54.932266 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.958167 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.978947 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.979741 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.979804 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.979822 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.979847 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.979864 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:54Z","lastTransitionTime":"2025-09-29T22:27:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:54 crc kubenswrapper[4922]: I0929 22:27:54.999036 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:54Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.019176 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.042288 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.062241 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.083514 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.083757 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.083796 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.083811 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.083832 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.083848 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.101823 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.131424 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86c36b114f8c8c56be299c89f4e08c10736e3bfb972e4480a8dd78f705c50eec\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:24Z\\\",\\\"message\\\":\\\"mns:[] Mutations:[{Column:policies Mutator:insert Value:{GoSet:[{GoUUID:a5a72d02-1a0f-4f7f-a8c5-6923a1c4274a}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f6d604c1-9711-4e25-be6c-79ec28bbad1b}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 22:27:24.890095 6608 address_set.go:302] New(0d39bc5c-d5b9-432c-81be-2275bce5d7aa/default-network-controller:EgressIP:node-ips:v4:default/a712973235162149816) with []\\\\nI0929 22:27:24.890143 6608 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0929 22:27:24.890172 6608 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0929 22:27:24.890267 6608 factory.go:1336] Added *v1.Node event handler 7\\\\nI0929 22:27:24.890327 6608 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0929 22:27:24.890749 6608 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0929 22:27:24.890850 6608 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0929 22:27:24.890894 6608 ovnkube.go:599] Stopped ovnkube\\\\nI0929 22:27:24.890944 6608 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 22:27:24.891028 6608 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"erLBGroup\\\\\\\"}}}\\\\nI0929 22:27:54.439234 6991 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439249 6991 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439262 6991 services_controller.go:454] Service openshift-ingress-operator/metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0929 22:27:54.439280 6991 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0929 22:27:54.439319 6991 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped 
alr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.147718 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.1
26.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.163492 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.180344 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.186841 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.186888 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.186907 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.186929 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.186946 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.197676 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.222221 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.238546 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.254526 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.274687 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.290136 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.290184 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.290193 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.290210 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.290223 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.393119 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.393181 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.393198 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.393223 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.393241 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.421233 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:55 crc kubenswrapper[4922]: E0929 22:27:55.421483 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.496754 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.496816 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.496833 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.496861 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.496885 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.600587 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.600647 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.600663 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.600693 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.600711 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.703751 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.703842 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.703866 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.704314 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.704670 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.807198 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.807263 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.807285 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.807316 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.807337 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.910607 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.910683 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.910708 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.910744 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.910762 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:55Z","lastTransitionTime":"2025-09-29T22:27:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.937917 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/3.log" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.941977 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:27:55 crc kubenswrapper[4922]: E0929 22:27:55.942207 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.962212 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:55 crc kubenswrapper[4922]: I0929 22:27:55.984044 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:55Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.006066 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.020159 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.020237 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.020261 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.020294 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.020317 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.026538 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.046950 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.071661 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.091045 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.109111 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.124159 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.124204 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.124221 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.124247 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.124264 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.127411 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.165708 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"erLBGroup\\\\\\\"}}}\\\\nI0929 22:27:54.439234 6991 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439249 6991 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439262 6991 services_controller.go:454] Service openshift-ingress-operator/metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0929 22:27:54.439280 6991 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0929 22:27:54.439319 6991 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped 
alr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.178936 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.193160 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.206253 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.223830 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.228461 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.228516 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.228539 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.228568 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.228592 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.247915 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.265716 4922 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.286991 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.332046 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.332109 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.332127 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.332152 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.332170 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.421436 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.421548 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:56 crc kubenswrapper[4922]: E0929 22:27:56.421644 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:56 crc kubenswrapper[4922]: E0929 22:27:56.421726 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.421856 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:56 crc kubenswrapper[4922]: E0929 22:27:56.421959 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.435141 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.435196 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.435215 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.435241 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.435259 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.441618 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.464606 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.483148 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.503794 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.521855 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.538641 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 
22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.539000 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.539048 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.539065 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.539086 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.539104 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.560528 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.580576 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.600815 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.621874 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.639764 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.642219 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.642287 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.642314 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.642347 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.642372 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.658194 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.674623 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.704760 4922 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"erLBGroup\\\\\\\"}}}\\\\nI0929 22:27:54.439234 6991 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439249 6991 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439262 6991 services_controller.go:454] Service openshift-ingress-operator/metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0929 22:27:54.439280 6991 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0929 22:27:54.439319 6991 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped alr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.720512 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.736013 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.746611 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.746712 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.746735 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.746763 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.746782 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.753370 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:27:56Z is after 2025-08-24T17:21:41Z" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.849912 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.849973 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.849991 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.850016 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.850033 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.952939 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.952992 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.953004 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.953021 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:56 crc kubenswrapper[4922]: I0929 22:27:56.953035 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:56Z","lastTransitionTime":"2025-09-29T22:27:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.056270 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.056344 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.056368 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.056428 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.056454 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.167243 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.167284 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.167300 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.167321 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.167340 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.270834 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.270900 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.270917 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.270943 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.270961 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.374620 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.374706 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.374736 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.374770 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.374793 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.421679 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:57 crc kubenswrapper[4922]: E0929 22:27:57.421901 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.478265 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.478328 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.478351 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.478380 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.478431 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.581873 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.581958 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.582006 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.582029 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.582046 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.684438 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.684505 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.684527 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.684557 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.684577 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.787785 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.787846 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.787863 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.787913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.787933 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.890846 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.890905 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.890922 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.890948 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.890966 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.993316 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.993376 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.993432 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.993463 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:57 crc kubenswrapper[4922]: I0929 22:27:57.993486 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:57Z","lastTransitionTime":"2025-09-29T22:27:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.096772 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.096841 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.096860 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.096884 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.096904 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.199432 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.199519 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.199569 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.199721 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.199740 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.302783 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.302851 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.302865 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.302881 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.302894 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.405871 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.405935 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.405956 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.405981 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.406000 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.421359 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.421443 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.421373 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:27:58 crc kubenswrapper[4922]: E0929 22:27:58.421592 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:27:58 crc kubenswrapper[4922]: E0929 22:27:58.421683 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:27:58 crc kubenswrapper[4922]: E0929 22:27:58.421785 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.508891 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.508954 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.508972 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.508999 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.509017 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
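The condition={...} payload that setters.go logs in the entries above is the node's Ready condition serialized as JSON. A minimal sketch that rebuilds the same condition with the public Kubernetes API types (assuming the k8s.io/api and k8s.io/apimachinery modules are available; this is illustrative, not the kubelet's own code):

// Illustrative reconstruction of the "Node became not ready" condition.
package main

import (
	"encoding/json"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	cond := corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  metav1.Now(),
		LastTransitionTime: metav1.Now(),
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
			"Has your network provider started?",
	}
	// Marshalling yields the same type/status/reason/message shape as the
	// condition={...} JSON repeated in the log entries above.
	out, _ := json.Marshal(cond)
	fmt.Println(string(out))
}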
Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.611947 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.612026 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.612052 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.612082 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.612103 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.714611 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.714675 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.714693 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.714719 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.714738 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.816987 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.817070 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.817087 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.817115 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.817132 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.919594 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.919669 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.919692 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.919716 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:58 crc kubenswrapper[4922]: I0929 22:27:58.919733 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:58Z","lastTransitionTime":"2025-09-29T22:27:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.022527 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.022596 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.022620 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.022649 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.022671 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.125742 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.125802 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.125827 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.125861 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.125882 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.228170 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.228230 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.228248 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.228270 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.228287 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.331065 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.331147 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.331165 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.331189 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.331208 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.421420 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:27:59 crc kubenswrapper[4922]: E0929 22:27:59.421634 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
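All of these pod sync failures report the same root cause: no CNI configuration file has been written to /etc/kubernetes/cni/net.d/ yet, so the runtime keeps reporting NetworkReady=false. A rough sketch of that kind of directory check, where the file extensions and the standalone layout are assumptions (the real check is performed by the container runtime, e.g. CRI-O/ocicni, not by code like this):

// Rough illustration of the check implied by the repeated CNI message.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether the directory contains any network config.
func hasCNIConfig(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the log message
	if !hasCNIConfig(dir) {
		fmt.Printf("no CNI configuration file in %s/. Has your network provider started?\n", dir)
	}
}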
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.434862 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.434936 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.434959 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.434989 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.435014 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.538445 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.538518 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.538538 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.538568 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.538589 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.641725 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.641799 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.641817 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.641843 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.641860 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.744099 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.744981 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.745174 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.745333 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.745524 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.848356 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.848424 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.848445 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.848468 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.848485 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.952080 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.952380 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.952563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.952723 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:27:59 crc kubenswrapper[4922]: I0929 22:27:59.952870 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:27:59Z","lastTransitionTime":"2025-09-29T22:27:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.055846 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.056134 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.056312 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.056500 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.056643 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.159499 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.159558 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.159576 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.159597 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.159613 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.262850 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.262913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.262934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.262959 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.262978 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.284585 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.284709 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.284830 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.284806073 +0000 UTC m=+148.595094916 (durationBeforeRetry 1m4s). 
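The nestedpendingoperations entries here back off failed volume operations before allowing a retry; the logged durationBeforeRetry of 1m4s is consistent with a doubling backoff. The 500ms starting point and the cap in this sketch are assumptions chosen to reproduce that value, not the kubelet's exact constants:

// Illustrative doubling backoff for failed volume operations.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond // assumed initial delay
	maxDelay := 2 * time.Minute     // assumed cap
	for attempt := 1; attempt <= 9; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	// The eighth attempt waits 1m4s (64s), matching the value in the log.
}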
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.284882 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.284932 4922 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.285039 4922 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.285061 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.285030419 +0000 UTC m=+148.595319262 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.285090 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.28507642 +0000 UTC m=+148.595365263 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.366512 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.366567 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.366585 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.366608 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.366626 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.386295 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.386495 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.386552 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.386589 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.386609 4922 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.386630 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 
22:28:00.386661 4922 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.386679 4922 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.386693 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.386671841 +0000 UTC m=+148.696960684 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.386732 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.386714292 +0000 UTC m=+148.697003145 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.421546 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.421642 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.421570 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.421726 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
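The kube-api-access-* volumes that fail to mount here are projected volumes built from the service account token plus the kube-root-ca.crt and, on OpenShift, openshift-service-ca.crt config maps, which is why both objects must be registered in the pod's namespace before setup can succeed. A sketch of that shape with the public API types (assuming the k8s.io/api module; the volume and config map names are taken from the log, the remaining field values are illustrative defaults):

// Illustrative composition of a kube-api-access projected volume.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	expiry := int64(3607)
	vol := corev1.Volume{
		Name: "kube-api-access-s2dwl", // name taken from the log
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						Path:              "token",
						ExpirationSeconds: &expiry,
					}},
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
					}},
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "openshift-service-ca.crt"},
					}},
				},
			},
		},
	}
	// Both config maps must exist before the projected volume can be set up.
	fmt.Printf("volume %s projects %d sources\n", vol.Name, len(vol.VolumeSource.Projected.Sources))
}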
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.421825 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:00 crc kubenswrapper[4922]: E0929 22:28:00.422069 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.469706 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.469766 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.469788 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.469823 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.469845 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.572132 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.572183 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.572201 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.572226 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.572245 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.675628 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.675697 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.675715 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.675740 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.675757 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.778786 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.778854 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.778872 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.778898 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.778915 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.882310 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.882368 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.882385 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.882431 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.882450 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.985176 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.985232 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.985249 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.985272 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:00 crc kubenswrapper[4922]: I0929 22:28:00.985290 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:00Z","lastTransitionTime":"2025-09-29T22:28:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.089205 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.089286 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.089310 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.089339 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.089361 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.192706 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.192765 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.192783 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.192810 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.192827 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.295956 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.296018 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.296037 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.296061 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.296081 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.398858 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.398921 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.398940 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.398967 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.398986 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.421175 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:01 crc kubenswrapper[4922]: E0929 22:28:01.421351 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.501973 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.502036 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.502060 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.502090 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.502110 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.605830 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.605899 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.605918 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.605945 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.605963 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.709079 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.709146 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.709169 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.709202 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.709222 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.812241 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.812294 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.812310 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.812334 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.812352 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.915131 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.915196 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.915213 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.915240 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:01 crc kubenswrapper[4922]: I0929 22:28:01.915257 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:01Z","lastTransitionTime":"2025-09-29T22:28:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.018287 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.018339 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.018354 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.018375 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.018424 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.126340 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.126424 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.126441 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.126466 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.126481 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.229336 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.229414 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.229432 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.229487 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.229505 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.332039 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.332107 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.332138 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.332171 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.332194 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.421239 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.421309 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.421315 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:02 crc kubenswrapper[4922]: E0929 22:28:02.421553 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:02 crc kubenswrapper[4922]: E0929 22:28:02.421635 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:02 crc kubenswrapper[4922]: E0929 22:28:02.421798 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.434707 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.434769 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.434795 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.434824 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.434846 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.537888 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.537941 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.537957 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.537981 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.537997 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.641370 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.641553 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.641582 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.641661 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.641750 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.745051 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.745115 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.745140 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.745170 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.745192 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.848513 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.848563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.848581 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.848605 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.848620 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.952965 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.953037 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.953058 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.953084 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:02 crc kubenswrapper[4922]: I0929 22:28:02.953101 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:02Z","lastTransitionTime":"2025-09-29T22:28:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.056326 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.056385 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.056457 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.056495 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.056519 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.177937 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.178017 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.178038 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.178062 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.178079 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.280587 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.280630 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.280646 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.280668 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.280684 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.383969 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.384032 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.384048 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.384074 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.384090 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.420866 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:03 crc kubenswrapper[4922]: E0929 22:28:03.421068 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.488279 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.488341 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.488358 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.488386 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.488445 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.590538 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.590587 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.590603 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.590690 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.590747 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.694449 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.694484 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.694494 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.694510 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.694520 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.797750 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.797813 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.797830 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.797855 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.797872 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.906541 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.906604 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.906621 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.906643 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:03 crc kubenswrapper[4922]: I0929 22:28:03.906659 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:03Z","lastTransitionTime":"2025-09-29T22:28:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.009846 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.009902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.009920 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.009942 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.009960 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.112932 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.112993 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.113011 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.113034 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.113051 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.215633 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.215687 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.215702 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.215725 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.215742 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.318714 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.318776 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.318793 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.318817 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.318834 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.412506 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.412634 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.412653 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.412682 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.412699 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.421199 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.421199 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.421376 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.421545 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.421689 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.422060 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.432892 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.437630 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.437676 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.437694 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.437716 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.437734 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.456135 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.489771 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.489832 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.489850 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.489872 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.489890 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.507097 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.511689 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.511755 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.511773 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.511797 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.511814 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.527547 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.532275 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.532325 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.532342 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.532365 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.532382 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.552035 4922 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T22:28:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ddd4882-ce71-4215-8ae4-d2eabf83bed6\\\",\\\"systemUUID\\\":\\\"dedd0e07-aa25-477e-8ea0-1bf77e1043bf\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:04Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:04 crc kubenswrapper[4922]: E0929 22:28:04.552251 4922 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.554379 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.554462 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.554481 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.554506 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.554523 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.657887 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.657934 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.657952 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.657974 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.657990 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.761025 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.761073 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.761090 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.761112 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.761129 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.864163 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.864217 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.864236 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.864263 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.864283 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.967575 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.967671 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.967689 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.967716 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:04 crc kubenswrapper[4922]: I0929 22:28:04.967733 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:04Z","lastTransitionTime":"2025-09-29T22:28:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.070582 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.070638 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.070657 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.070680 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.070696 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.173827 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.173884 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.173902 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.173929 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.173979 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.277441 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.277498 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.277515 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.277539 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.277558 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.380873 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.380933 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.380949 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.380974 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.380992 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.420888 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:05 crc kubenswrapper[4922]: E0929 22:28:05.421076 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.483075 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.483163 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.483197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.483229 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.483250 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.586756 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.586838 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.586862 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.586893 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.586915 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.691273 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.691334 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.691352 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.691376 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.691417 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.795071 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.795485 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.795503 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.795526 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.795543 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.898614 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.898671 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.898692 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.898721 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:05 crc kubenswrapper[4922]: I0929 22:28:05.898741 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:05Z","lastTransitionTime":"2025-09-29T22:28:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.002090 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.002171 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.002197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.002229 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.002251 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.106062 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.106123 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.106140 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.106165 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.106183 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.208309 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.208353 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.208363 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.208378 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.208405 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.311090 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.311147 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.311163 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.311188 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.311206 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.414215 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.414273 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.414291 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.414314 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.414331 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.421688 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.421716 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.422101 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:06 crc kubenswrapper[4922]: E0929 22:28:06.422379 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:06 crc kubenswrapper[4922]: E0929 22:28:06.422537 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:06 crc kubenswrapper[4922]: E0929 22:28:06.422664 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.442489 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"09d9860a-bccf-4df5-8664-3af823b9bec5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7382643cbb79e9f1b8a80e01df7beaae7d0e6776b3f52a832f8edfc42503a4fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1199a80d4c059fe53517a54a8ea423c994d24d3cf85989dad4a695e77c84a513\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47
ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4jnwb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-s5zhx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.463289 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ac6711a4d52a9efaf8f30fac53dd6aba59f2578dc5a5eadf0a3b96b71f33045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.483305 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.506468 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95e270b3-769f-4de2-9beb-6d425d722986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd42223914155ed2848ad4f0c805ed1a3e382834afa0d7669136e2513a8ea9a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://27bd03eff56164c0fafff68e8cf32f9ffee34a5b737c45e5529d9a8965ec77d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f74702ddca2b1cc9206c0e1d63b3a61964cf3812fd8e64236a5523bc8168b94b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://767d46c381d38f208535124e97cee84aea92dd3d088e67214caf0d35fbaceecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ca5385d503e9cf8f06afa0417b530ba5faaffc68f0e2629e0e2d460e4e4a739\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b56d59a21f2d69f901a1096d077abb2beff7a79df038b6aaf0245179fe21df\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37cd1ca1d3c92c73212aeed8afbc8d8a3736a9a05e6761cbbc86402b22fada3d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:27:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5xbwf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-wvnl9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.516913 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.516978 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc 
kubenswrapper[4922]: I0929 22:28:06.516995 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.517021 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.517038 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.530507 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.551848 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7565b7e5cbea0684f873f56cd055e420cd707bfdee38553f09a150dc7969f25a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://466dc42f75e9d8e6ec2bf067e1bec4269bac93644dfe0208f747344afe103d1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.578010 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-md9pf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6edd2cff-7363-4e99-8cc3-3db297410bce\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:45Z\\\",\\\"message\\\":\\\"2025-09-29T22:26:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52\\\\n2025-09-29T22:26:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_277e5769-b9e5-4f6c-96bc-34dacf3d3c52 to /host/opt/cni/bin/\\\\n2025-09-29T22:26:59Z [verbose] multus-daemon started\\\\n2025-09-29T22:26:59Z [verbose] Readiness Indicator file check\\\\n2025-09-29T22:27:44Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qfdm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-md9pf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.602554 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f444dd0-4d59-41d7-a23e-16910f55ce2f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d56bb0b4e98c39de8949c997f96deda37009e7b1a7158a43aeda8b3e561faddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0c10db62d875e42ae402f8e5bde0c1a491cd58273035ebbd1e28f7d06ca8635\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e917d53e5e67169c8c88f2a42a3201505192c91d1ca943e15764228afce82f42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c346dec03eef5ac4cb5ad77bacf412305a99a169d631f963739782ba1e87a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d66841bbd7bae66fc260f03847980a0f0c6c806cb7241d0f95aa215cff5edaa\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0929 22:26:49.986168 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 22:26:49.994876 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2856905692/tls.crt::/tmp/serving-cert-2856905692/tls.key\\\\\\\"\\\\nI0929 22:26:55.992881 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 22:26:56.015276 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 22:26:56.015314 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 22:26:56.015354 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 22:26:56.015364 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 22:26:56.031191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 22:26:56.031238 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 22:26:56.031245 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 22:26:56.031253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 22:26:56.031286 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 22:26:56.031295 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 22:26:56.031301 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 22:26:56.031306 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 22:26:56.034541 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea09c7778c7f96e6b389a7d6625e122f157c856bdd004b88b9a5d9653c3e3f75\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82c5bdb1a3ffb751d47eb0d810e7ba4a88ac29312454c6a39cb5d01abcb3d9b0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.619795 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.620072 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.620249 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.620525 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.620768 4922 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.625335 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"63825dd4-7651-45d1-bc7f-3517a21912db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9eb8363599a6833625e9f0f48298b56349287f01d2acac9c643e504d359765bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fcbfdfb6b62018eac0bbb1ddb28ee26b14f23a2ba6d0e7a30bec943ab5a5f97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f3b683a1c6041e111b83a28b54d3dd55f01ff2b2cf714ab1efb0adbcb3e7e64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://38ef02558cfc0ff44fc533eba08d1dbc3769db57192d58874f11c32a189de3e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.644850 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcac64ada320040ddeb0063515345e4eb535951d8ff5357f0bb0e21ab6f3f15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.665721 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"347374f7-ade0-4434-b26d-db474c4413f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://70f1d1ef5ae3692a6307074ae864f9bb9c02da45a9f3b606ceb87cb33f6de4a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qwfjx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-pbnnm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.698139 4922 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef991319-1ee8-4778-8567-9b4e8ff7600c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T22:27:54Z\\\",\\\"message\\\":\\\"erLBGroup\\\\\\\"}}}\\\\nI0929 22:27:54.439234 6991 services_controller.go:452] Built service openshift-ingress-operator/metrics per-node LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439249 6991 services_controller.go:453] Built service openshift-ingress-operator/metrics template LB for network=default: []services.LB{}\\\\nI0929 22:27:54.439262 6991 services_controller.go:454] Service openshift-ingress-operator/metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI0929 22:27:54.439280 6991 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0929 22:27:54.439319 6991 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped alr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T22:27:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T22:26:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zjrw4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-tqsst\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.717054 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-l2k7v" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"222f81de-5954-4c27-8d86-6281bc47901f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://17c45aaf270dbe562f7dc37487c57a02cfb86dac20a77325f8b1142e1cbd23fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:27:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pj86w\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:59Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-l2k7v\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.723436 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.723666 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.723810 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.723951 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.724102 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.734607 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51c5d7b9-741c-448f-b19e-9441e62a48c6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:27:12Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-42tg6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:27:12Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-gkfvg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.754550 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"13302202-3be0-439c-9677-f54fcf844dbc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d7f2bb7f04fd86db1aa1a32ff23efc663b829b685b369fec5e342cfdbfa3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80ee8dc2502daa18454f6aba864a8fab444d3c1d891e3b42f6a9d380a9ea95bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://76970f0f8425a23902569022f74d9944eb470ac825f85f6f7cfc0080ae364547\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3be4a8c073a914695ea93dd76be8e08f8d0b7c829deab60fcf780080ba2c2c9d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.774601 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:56Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.791131 4922 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-nwkv8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d93672bf-e1a5-46d2-85af-4af1f765eb8d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T22:26:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1c8252037274e671433f0fafe73aea579e06b5cade5745e49ff7d7ad00732af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T22:26:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4gvjq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T22:26:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-nwkv8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T22:28:06Z is after 2025-08-24T17:21:41Z" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.827504 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.827573 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.827599 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.827628 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.827650 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.930458 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.930517 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.930534 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.930561 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:06 crc kubenswrapper[4922]: I0929 22:28:06.930580 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:06Z","lastTransitionTime":"2025-09-29T22:28:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.032954 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.033036 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.033056 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.033079 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.033097 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.136352 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.136758 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.136969 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.137223 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.137452 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.240559 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.240822 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.241050 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.241295 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.241509 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.344272 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.344676 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.344863 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.345069 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.345283 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.421803 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:07 crc kubenswrapper[4922]: E0929 22:28:07.422046 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.447812 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.447880 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.447898 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.447924 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.447942 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.551464 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.551526 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.551549 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.551576 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.551600 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.655136 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.655195 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.655212 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.655237 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.655254 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.758897 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.758966 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.758983 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.759008 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.759025 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.861967 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.862060 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.862078 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.862103 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.862122 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.966299 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.966364 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.966384 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.966435 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:07 crc kubenswrapper[4922]: I0929 22:28:07.966454 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:07Z","lastTransitionTime":"2025-09-29T22:28:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.069569 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.069640 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.069664 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.069697 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.069722 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.173350 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.173434 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.173448 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.173469 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.173487 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.276942 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.277034 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.277056 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.277085 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.277109 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.380335 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.380418 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.380436 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.380465 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.380482 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.423659 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.423730 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.423781 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:08 crc kubenswrapper[4922]: E0929 22:28:08.424491 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:08 crc kubenswrapper[4922]: E0929 22:28:08.424241 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:08 crc kubenswrapper[4922]: E0929 22:28:08.424600 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.443867 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.483761 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.483820 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.483838 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.483862 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.483883 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.587280 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.587360 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.587387 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.587460 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.587482 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.690278 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.690347 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.690371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.690432 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.690458 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.793675 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.793763 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.793782 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.793811 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.793832 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.896919 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.896994 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.897017 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.897047 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:08 crc kubenswrapper[4922]: I0929 22:28:08.897069 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:08Z","lastTransitionTime":"2025-09-29T22:28:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.000942 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.001000 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.001017 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.001045 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.001064 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.104911 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.104982 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.105002 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.105030 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.105048 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.207903 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.207981 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.208005 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.208035 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.208057 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.310633 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.310705 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.310723 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.310750 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.310765 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.414361 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.414460 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.414479 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.414509 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.414528 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.420841 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:09 crc kubenswrapper[4922]: E0929 22:28:09.421051 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.422111 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:28:09 crc kubenswrapper[4922]: E0929 22:28:09.422372 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.441447 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.517577 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.517684 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.517709 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.517784 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.517849 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.620966 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.621340 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.621539 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.621699 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.621861 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.725197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.725237 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.725255 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.725279 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.725298 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.828489 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.828583 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.828603 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.828630 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.828649 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.931477 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.931574 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.931592 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.931621 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:09 crc kubenswrapper[4922]: I0929 22:28:09.931643 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:09Z","lastTransitionTime":"2025-09-29T22:28:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.034194 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.034262 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.034280 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.034308 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.034325 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.137712 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.138653 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.138857 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.139024 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.139168 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.242496 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.242563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.242583 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.242608 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.242626 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.346061 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.346535 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.346740 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.346898 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.347090 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.421512 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.421512 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.422058 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:10 crc kubenswrapper[4922]: E0929 22:28:10.422219 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:10 crc kubenswrapper[4922]: E0929 22:28:10.422347 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:10 crc kubenswrapper[4922]: E0929 22:28:10.422546 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.450073 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.450124 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.450144 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.450165 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.450183 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.553528 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.553603 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.553626 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.553657 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.553678 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.656453 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.656497 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.656513 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.656537 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.656554 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.758767 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.758818 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.758835 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.758857 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.758873 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.862118 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.862180 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.862197 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.862222 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.862240 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.964866 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.964930 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.965012 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.965043 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:10 crc kubenswrapper[4922]: I0929 22:28:10.965067 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:10Z","lastTransitionTime":"2025-09-29T22:28:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.067817 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.067870 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.067887 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.067910 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.067930 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.172067 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.172134 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.172152 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.172184 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.172205 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.276371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.276481 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.276499 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.276526 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.276546 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.379891 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.379950 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.379968 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.379993 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.380012 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.421803 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:11 crc kubenswrapper[4922]: E0929 22:28:11.422015 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.483256 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.483329 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.483346 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.483371 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.483430 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.585742 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.585790 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.585807 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.585831 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.585847 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.689501 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.689552 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.689568 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.689594 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.689611 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.792580 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.792677 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.792703 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.792735 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.792761 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.895893 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.895942 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.895951 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.895968 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.895976 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.997891 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.997952 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.997975 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.998004 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:11 crc kubenswrapper[4922]: I0929 22:28:11.998025 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:11Z","lastTransitionTime":"2025-09-29T22:28:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.107313 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.107373 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.107430 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.107462 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.107483 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.210641 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.210729 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.210754 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.210788 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.210815 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.313790 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.313873 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.313898 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.313928 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.313949 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.417532 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.417595 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.417611 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.417636 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.417651 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.420864 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.420985 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:12 crc kubenswrapper[4922]: E0929 22:28:12.421161 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.421220 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:12 crc kubenswrapper[4922]: E0929 22:28:12.421536 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:12 crc kubenswrapper[4922]: E0929 22:28:12.421598 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.519927 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.519988 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.520005 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.520028 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.520045 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.623748 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.623815 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.623840 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.623872 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.623894 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.727230 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.727296 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.727312 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.727343 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.727365 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.830036 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.830104 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.830130 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.830163 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.830186 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.933747 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.933814 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.933837 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.933867 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:12 crc kubenswrapper[4922]: I0929 22:28:12.933893 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:12Z","lastTransitionTime":"2025-09-29T22:28:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.037924 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.037992 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.038011 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.038036 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.038055 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.141183 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.141236 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.141254 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.141280 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.141298 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.244793 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.244856 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.244886 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.244910 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.244930 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.348700 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.348761 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.348778 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.348804 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.348826 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.421797 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:13 crc kubenswrapper[4922]: E0929 22:28:13.422279 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.451489 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.451547 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.451566 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.451589 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.451605 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.554563 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.554631 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.554649 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.554677 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.554695 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.657708 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.657799 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.657815 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.657848 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.657866 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.760358 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.760416 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.760427 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.760444 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.760458 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.863440 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.863506 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.863523 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.863547 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.863564 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.966849 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.966906 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.966922 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.966947 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:13 crc kubenswrapper[4922]: I0929 22:28:13.966967 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:13Z","lastTransitionTime":"2025-09-29T22:28:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.070386 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.070513 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.070537 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.070567 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.070590 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.174542 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.174643 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.174664 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.174730 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.174747 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.278254 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.278351 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.278375 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.278453 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.278480 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.381496 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.381567 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.381591 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.381622 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.381644 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.421474 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.421555 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:14 crc kubenswrapper[4922]: E0929 22:28:14.421647 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.421693 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:14 crc kubenswrapper[4922]: E0929 22:28:14.421889 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:14 crc kubenswrapper[4922]: E0929 22:28:14.422006 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.484696 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.484762 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.484784 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.484812 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.484833 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.586795 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.586853 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.586877 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.586906 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.586927 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.682132 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.682182 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.682203 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.682326 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.682370 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.713938 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.713983 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.713991 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.714006 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.714018 4922 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T22:28:14Z","lastTransitionTime":"2025-09-29T22:28:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.742381 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx"] Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.742953 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.745206 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.745227 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.746564 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.746751 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.820682 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-wvnl9" podStartSLOduration=77.820656986 podStartE2EDuration="1m17.820656986s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:14.819677322 +0000 UTC m=+99.129966185" watchObservedRunningTime="2025-09-29 22:28:14.820656986 +0000 UTC m=+99.130945839" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.843905 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-s5zhx" podStartSLOduration=77.843884084 podStartE2EDuration="1m17.843884084s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:14.843120915 +0000 UTC m=+99.153409768" watchObservedRunningTime="2025-09-29 22:28:14.843884084 +0000 UTC m=+99.154172937" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.858969 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65083824-9d9d-408f-b905-66aff1602969-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.859006 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65083824-9d9d-408f-b905-66aff1602969-service-ca\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.859025 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65083824-9d9d-408f-b905-66aff1602969-kube-api-access\") pod 
\"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.859176 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65083824-9d9d-408f-b905-66aff1602969-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.859275 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65083824-9d9d-408f-b905-66aff1602969-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.918276 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.918245726 podStartE2EDuration="1m18.918245726s" podCreationTimestamp="2025-09-29 22:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:14.892345891 +0000 UTC m=+99.202634744" watchObservedRunningTime="2025-09-29 22:28:14.918245726 +0000 UTC m=+99.228534579" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.918587 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=42.918573534 podStartE2EDuration="42.918573534s" podCreationTimestamp="2025-09-29 22:27:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:14.915880396 +0000 UTC m=+99.226169249" watchObservedRunningTime="2025-09-29 22:28:14.918573534 +0000 UTC m=+99.228862387" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.960669 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65083824-9d9d-408f-b905-66aff1602969-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.960763 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65083824-9d9d-408f-b905-66aff1602969-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.960822 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65083824-9d9d-408f-b905-66aff1602969-service-ca\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 
crc kubenswrapper[4922]: I0929 22:28:14.960853 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65083824-9d9d-408f-b905-66aff1602969-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.960910 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65083824-9d9d-408f-b905-66aff1602969-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.961313 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65083824-9d9d-408f-b905-66aff1602969-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.961384 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65083824-9d9d-408f-b905-66aff1602969-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.962416 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65083824-9d9d-408f-b905-66aff1602969-service-ca\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.974186 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65083824-9d9d-408f-b905-66aff1602969-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:14 crc kubenswrapper[4922]: I0929 22:28:14.986555 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65083824-9d9d-408f-b905-66aff1602969-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-w4nvx\" (UID: \"65083824-9d9d-408f-b905-66aff1602969\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.007850 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-md9pf" podStartSLOduration=78.007836672 podStartE2EDuration="1m18.007836672s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:15.007489614 +0000 UTC m=+99.317778427" watchObservedRunningTime="2025-09-29 22:28:15.007836672 +0000 UTC m=+99.318125485" Sep 29 22:28:15 crc 
kubenswrapper[4922]: I0929 22:28:15.047057 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=6.047039774 podStartE2EDuration="6.047039774s" podCreationTimestamp="2025-09-29 22:28:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:15.034480677 +0000 UTC m=+99.344769490" watchObservedRunningTime="2025-09-29 22:28:15.047039774 +0000 UTC m=+99.357328587" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.061477 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.062214 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=79.062190678 podStartE2EDuration="1m19.062190678s" podCreationTimestamp="2025-09-29 22:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:15.047723722 +0000 UTC m=+99.358012535" watchObservedRunningTime="2025-09-29 22:28:15.062190678 +0000 UTC m=+99.372479501" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.113999 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podStartSLOduration=78.113985469 podStartE2EDuration="1m18.113985469s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:15.084652286 +0000 UTC m=+99.394941099" watchObservedRunningTime="2025-09-29 22:28:15.113985469 +0000 UTC m=+99.424274282" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.138093 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-l2k7v" podStartSLOduration=78.138066978 podStartE2EDuration="1m18.138066978s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:15.123711095 +0000 UTC m=+99.433999898" watchObservedRunningTime="2025-09-29 22:28:15.138066978 +0000 UTC m=+99.448355791" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.159168 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-nwkv8" podStartSLOduration=78.159142341 podStartE2EDuration="1m18.159142341s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:15.158451384 +0000 UTC m=+99.468740197" watchObservedRunningTime="2025-09-29 22:28:15.159142341 +0000 UTC m=+99.469431154" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.159824 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=7.159818699 podStartE2EDuration="7.159818699s" podCreationTimestamp="2025-09-29 22:28:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:15.149710343 +0000 UTC 
m=+99.459999176" watchObservedRunningTime="2025-09-29 22:28:15.159818699 +0000 UTC m=+99.470107512" Sep 29 22:28:15 crc kubenswrapper[4922]: I0929 22:28:15.421731 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:15 crc kubenswrapper[4922]: E0929 22:28:15.421935 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:16 crc kubenswrapper[4922]: I0929 22:28:16.012055 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" event={"ID":"65083824-9d9d-408f-b905-66aff1602969","Type":"ContainerStarted","Data":"01274f28b06309d142085de80dee0a0edd97e52b04bacec7f67237e135bf99ed"} Sep 29 22:28:16 crc kubenswrapper[4922]: I0929 22:28:16.012129 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" event={"ID":"65083824-9d9d-408f-b905-66aff1602969","Type":"ContainerStarted","Data":"cf7f6246d6e95cc09edf96fd5c57b36d34d6b9fa158b7e91058210338f59bf4a"} Sep 29 22:28:16 crc kubenswrapper[4922]: I0929 22:28:16.275580 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:16 crc kubenswrapper[4922]: E0929 22:28:16.275837 4922 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:28:16 crc kubenswrapper[4922]: E0929 22:28:16.275974 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs podName:51c5d7b9-741c-448f-b19e-9441e62a48c6 nodeName:}" failed. No retries permitted until 2025-09-29 22:29:20.275930004 +0000 UTC m=+164.586218857 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs") pod "network-metrics-daemon-gkfvg" (UID: "51c5d7b9-741c-448f-b19e-9441e62a48c6") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 22:28:16 crc kubenswrapper[4922]: I0929 22:28:16.421663 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:16 crc kubenswrapper[4922]: I0929 22:28:16.421810 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:16 crc kubenswrapper[4922]: I0929 22:28:16.423489 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:16 crc kubenswrapper[4922]: E0929 22:28:16.423479 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:16 crc kubenswrapper[4922]: E0929 22:28:16.423779 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:16 crc kubenswrapper[4922]: E0929 22:28:16.423932 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:17 crc kubenswrapper[4922]: I0929 22:28:17.421492 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:17 crc kubenswrapper[4922]: E0929 22:28:17.421679 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:18 crc kubenswrapper[4922]: I0929 22:28:18.421124 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:18 crc kubenswrapper[4922]: I0929 22:28:18.421129 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:18 crc kubenswrapper[4922]: E0929 22:28:18.421295 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:18 crc kubenswrapper[4922]: I0929 22:28:18.421329 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:18 crc kubenswrapper[4922]: E0929 22:28:18.421427 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:18 crc kubenswrapper[4922]: E0929 22:28:18.421483 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:19 crc kubenswrapper[4922]: I0929 22:28:19.421251 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:19 crc kubenswrapper[4922]: E0929 22:28:19.421410 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:20 crc kubenswrapper[4922]: I0929 22:28:20.423131 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:20 crc kubenswrapper[4922]: E0929 22:28:20.423258 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:20 crc kubenswrapper[4922]: I0929 22:28:20.423498 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:20 crc kubenswrapper[4922]: E0929 22:28:20.423556 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:20 crc kubenswrapper[4922]: I0929 22:28:20.423784 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:20 crc kubenswrapper[4922]: E0929 22:28:20.423848 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:21 crc kubenswrapper[4922]: I0929 22:28:21.421833 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:21 crc kubenswrapper[4922]: E0929 22:28:21.422472 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:21 crc kubenswrapper[4922]: I0929 22:28:21.422991 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:28:21 crc kubenswrapper[4922]: E0929 22:28:21.423253 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:28:22 crc kubenswrapper[4922]: I0929 22:28:22.421702 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:22 crc kubenswrapper[4922]: I0929 22:28:22.421722 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:22 crc kubenswrapper[4922]: E0929 22:28:22.421878 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:22 crc kubenswrapper[4922]: I0929 22:28:22.421940 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:22 crc kubenswrapper[4922]: E0929 22:28:22.422050 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:22 crc kubenswrapper[4922]: E0929 22:28:22.422173 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:23 crc kubenswrapper[4922]: I0929 22:28:23.421201 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:23 crc kubenswrapper[4922]: E0929 22:28:23.421365 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:24 crc kubenswrapper[4922]: I0929 22:28:24.421742 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:24 crc kubenswrapper[4922]: I0929 22:28:24.421783 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:24 crc kubenswrapper[4922]: E0929 22:28:24.421962 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:24 crc kubenswrapper[4922]: I0929 22:28:24.422046 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:24 crc kubenswrapper[4922]: E0929 22:28:24.422229 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:24 crc kubenswrapper[4922]: E0929 22:28:24.422458 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:25 crc kubenswrapper[4922]: I0929 22:28:25.421027 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:25 crc kubenswrapper[4922]: E0929 22:28:25.421198 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:26 crc kubenswrapper[4922]: I0929 22:28:26.421175 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:26 crc kubenswrapper[4922]: I0929 22:28:26.421278 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:26 crc kubenswrapper[4922]: E0929 22:28:26.423237 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:26 crc kubenswrapper[4922]: I0929 22:28:26.423284 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:26 crc kubenswrapper[4922]: E0929 22:28:26.423513 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:26 crc kubenswrapper[4922]: E0929 22:28:26.423818 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:27 crc kubenswrapper[4922]: I0929 22:28:27.421381 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:27 crc kubenswrapper[4922]: E0929 22:28:27.422154 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:28 crc kubenswrapper[4922]: I0929 22:28:28.421703 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:28 crc kubenswrapper[4922]: I0929 22:28:28.421768 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:28 crc kubenswrapper[4922]: E0929 22:28:28.421895 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:28 crc kubenswrapper[4922]: I0929 22:28:28.421970 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:28 crc kubenswrapper[4922]: E0929 22:28:28.422566 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:28 crc kubenswrapper[4922]: E0929 22:28:28.422769 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:29 crc kubenswrapper[4922]: I0929 22:28:29.421657 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:29 crc kubenswrapper[4922]: E0929 22:28:29.421823 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:30 crc kubenswrapper[4922]: I0929 22:28:30.421321 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:30 crc kubenswrapper[4922]: I0929 22:28:30.421383 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:30 crc kubenswrapper[4922]: I0929 22:28:30.421436 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:30 crc kubenswrapper[4922]: E0929 22:28:30.421556 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:30 crc kubenswrapper[4922]: E0929 22:28:30.421920 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:30 crc kubenswrapper[4922]: E0929 22:28:30.422031 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:31 crc kubenswrapper[4922]: I0929 22:28:31.421128 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:31 crc kubenswrapper[4922]: E0929 22:28:31.421303 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.076355 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/1.log" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.077294 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/0.log" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.077364 4922 generic.go:334] "Generic (PLEG): container finished" podID="6edd2cff-7363-4e99-8cc3-3db297410bce" containerID="940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e" exitCode=1 Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.077470 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerDied","Data":"940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e"} Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.077568 4922 scope.go:117] "RemoveContainer" containerID="2cfcfdc04593d4108b4fdc49c351612d522edf498f4b50a7eabc229e42bc2799" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.079075 4922 scope.go:117] "RemoveContainer" containerID="940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e" Sep 29 22:28:32 crc kubenswrapper[4922]: E0929 22:28:32.080756 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-md9pf_openshift-multus(6edd2cff-7363-4e99-8cc3-3db297410bce)\"" pod="openshift-multus/multus-md9pf" podUID="6edd2cff-7363-4e99-8cc3-3db297410bce" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.107748 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w4nvx" podStartSLOduration=95.107726317 podStartE2EDuration="1m35.107726317s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:16.034816192 +0000 UTC m=+100.345105045" watchObservedRunningTime="2025-09-29 22:28:32.107726317 +0000 UTC m=+116.418015160" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.421003 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.421014 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:32 crc kubenswrapper[4922]: I0929 22:28:32.421126 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:32 crc kubenswrapper[4922]: E0929 22:28:32.421273 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:32 crc kubenswrapper[4922]: E0929 22:28:32.421520 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:32 crc kubenswrapper[4922]: E0929 22:28:32.421611 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:33 crc kubenswrapper[4922]: I0929 22:28:33.084564 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/1.log" Sep 29 22:28:33 crc kubenswrapper[4922]: I0929 22:28:33.421876 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:33 crc kubenswrapper[4922]: E0929 22:28:33.422104 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:34 crc kubenswrapper[4922]: I0929 22:28:34.420972 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:34 crc kubenswrapper[4922]: E0929 22:28:34.421153 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:34 crc kubenswrapper[4922]: I0929 22:28:34.421195 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:34 crc kubenswrapper[4922]: I0929 22:28:34.421250 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:34 crc kubenswrapper[4922]: E0929 22:28:34.421877 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:34 crc kubenswrapper[4922]: E0929 22:28:34.422040 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:34 crc kubenswrapper[4922]: I0929 22:28:34.422302 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:28:34 crc kubenswrapper[4922]: E0929 22:28:34.422639 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-tqsst_openshift-ovn-kubernetes(ef991319-1ee8-4778-8567-9b4e8ff7600c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" Sep 29 22:28:35 crc kubenswrapper[4922]: I0929 22:28:35.420848 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:35 crc kubenswrapper[4922]: E0929 22:28:35.421048 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:36 crc kubenswrapper[4922]: E0929 22:28:36.345489 4922 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 22:28:36 crc kubenswrapper[4922]: I0929 22:28:36.421501 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:36 crc kubenswrapper[4922]: I0929 22:28:36.421551 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:36 crc kubenswrapper[4922]: I0929 22:28:36.421624 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:36 crc kubenswrapper[4922]: E0929 22:28:36.423362 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:36 crc kubenswrapper[4922]: E0929 22:28:36.423579 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:36 crc kubenswrapper[4922]: E0929 22:28:36.423708 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:36 crc kubenswrapper[4922]: E0929 22:28:36.551333 4922 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 22:28:37 crc kubenswrapper[4922]: I0929 22:28:37.421207 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:37 crc kubenswrapper[4922]: E0929 22:28:37.422700 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:38 crc kubenswrapper[4922]: I0929 22:28:38.421566 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:38 crc kubenswrapper[4922]: I0929 22:28:38.421649 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:38 crc kubenswrapper[4922]: I0929 22:28:38.421684 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:38 crc kubenswrapper[4922]: E0929 22:28:38.421769 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:38 crc kubenswrapper[4922]: E0929 22:28:38.421910 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:38 crc kubenswrapper[4922]: E0929 22:28:38.422426 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:39 crc kubenswrapper[4922]: I0929 22:28:39.421663 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:39 crc kubenswrapper[4922]: E0929 22:28:39.421853 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:40 crc kubenswrapper[4922]: I0929 22:28:40.421816 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:40 crc kubenswrapper[4922]: E0929 22:28:40.421972 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:40 crc kubenswrapper[4922]: I0929 22:28:40.422042 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:40 crc kubenswrapper[4922]: I0929 22:28:40.422070 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:40 crc kubenswrapper[4922]: E0929 22:28:40.422120 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:40 crc kubenswrapper[4922]: E0929 22:28:40.422300 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:41 crc kubenswrapper[4922]: I0929 22:28:41.421269 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:41 crc kubenswrapper[4922]: E0929 22:28:41.421452 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:41 crc kubenswrapper[4922]: E0929 22:28:41.552492 4922 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 22:28:42 crc kubenswrapper[4922]: I0929 22:28:42.420826 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:42 crc kubenswrapper[4922]: I0929 22:28:42.420883 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:42 crc kubenswrapper[4922]: E0929 22:28:42.421052 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:42 crc kubenswrapper[4922]: I0929 22:28:42.421112 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:42 crc kubenswrapper[4922]: E0929 22:28:42.421283 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:42 crc kubenswrapper[4922]: E0929 22:28:42.421474 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:43 crc kubenswrapper[4922]: I0929 22:28:43.421461 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:43 crc kubenswrapper[4922]: E0929 22:28:43.421666 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:44 crc kubenswrapper[4922]: I0929 22:28:44.420849 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:44 crc kubenswrapper[4922]: I0929 22:28:44.420941 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:44 crc kubenswrapper[4922]: E0929 22:28:44.421031 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:44 crc kubenswrapper[4922]: E0929 22:28:44.421149 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:44 crc kubenswrapper[4922]: I0929 22:28:44.421236 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:44 crc kubenswrapper[4922]: E0929 22:28:44.421494 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:45 crc kubenswrapper[4922]: I0929 22:28:45.421449 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:45 crc kubenswrapper[4922]: E0929 22:28:45.421637 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:46 crc kubenswrapper[4922]: I0929 22:28:46.421677 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:46 crc kubenswrapper[4922]: I0929 22:28:46.421720 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:46 crc kubenswrapper[4922]: I0929 22:28:46.421804 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:46 crc kubenswrapper[4922]: E0929 22:28:46.423304 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:46 crc kubenswrapper[4922]: E0929 22:28:46.423501 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:46 crc kubenswrapper[4922]: E0929 22:28:46.424128 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:46 crc kubenswrapper[4922]: I0929 22:28:46.424636 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:28:46 crc kubenswrapper[4922]: E0929 22:28:46.553239 4922 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 22:28:47 crc kubenswrapper[4922]: I0929 22:28:47.138550 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/3.log" Sep 29 22:28:47 crc kubenswrapper[4922]: I0929 22:28:47.142055 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerStarted","Data":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} Sep 29 22:28:47 crc kubenswrapper[4922]: I0929 22:28:47.142511 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:28:47 crc kubenswrapper[4922]: I0929 22:28:47.170890 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podStartSLOduration=110.170865065 podStartE2EDuration="1m50.170865065s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:47.170163397 +0000 UTC m=+131.480452240" watchObservedRunningTime="2025-09-29 22:28:47.170865065 +0000 UTC m=+131.481153888" Sep 29 22:28:47 crc kubenswrapper[4922]: I0929 22:28:47.420751 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:47 crc kubenswrapper[4922]: E0929 22:28:47.421025 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:47 crc kubenswrapper[4922]: I0929 22:28:47.421601 4922 scope.go:117] "RemoveContainer" containerID="940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e" Sep 29 22:28:47 crc kubenswrapper[4922]: I0929 22:28:47.479798 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gkfvg"] Sep 29 22:28:48 crc kubenswrapper[4922]: I0929 22:28:48.149721 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/1.log" Sep 29 22:28:48 crc kubenswrapper[4922]: I0929 22:28:48.149812 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:48 crc kubenswrapper[4922]: E0929 22:28:48.149928 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:48 crc kubenswrapper[4922]: I0929 22:28:48.150086 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerStarted","Data":"491978895dbeae4eb647475fdf5d8fe66c207f8194037b81d47c50656591ebbc"} Sep 29 22:28:48 crc kubenswrapper[4922]: I0929 22:28:48.421476 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:48 crc kubenswrapper[4922]: I0929 22:28:48.421600 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:48 crc kubenswrapper[4922]: E0929 22:28:48.421710 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:48 crc kubenswrapper[4922]: I0929 22:28:48.421489 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:48 crc kubenswrapper[4922]: E0929 22:28:48.421867 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:48 crc kubenswrapper[4922]: E0929 22:28:48.422004 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:50 crc kubenswrapper[4922]: I0929 22:28:50.421513 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:50 crc kubenswrapper[4922]: I0929 22:28:50.421608 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:50 crc kubenswrapper[4922]: I0929 22:28:50.421714 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:50 crc kubenswrapper[4922]: E0929 22:28:50.421719 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 22:28:50 crc kubenswrapper[4922]: E0929 22:28:50.421853 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 22:28:50 crc kubenswrapper[4922]: E0929 22:28:50.421968 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-gkfvg" podUID="51c5d7b9-741c-448f-b19e-9441e62a48c6" Sep 29 22:28:50 crc kubenswrapper[4922]: I0929 22:28:50.422075 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:50 crc kubenswrapper[4922]: E0929 22:28:50.422152 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.420793 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.420877 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.420803 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.421092 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.423665 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.424084 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.424681 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.425209 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.425351 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 29 22:28:52 crc kubenswrapper[4922]: I0929 22:28:52.425369 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 29 22:28:53 crc kubenswrapper[4922]: I0929 22:28:53.952926 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.594585 4922 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.649665 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7sqx6"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.650380 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.651243 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vqqxw"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.652256 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.658465 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.658564 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.659076 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.659293 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.659465 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.659709 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.659816 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.660450 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.660513 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.662475 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h55bj"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.663077 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.665545 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s985q"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.666455 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.666917 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lzx9z"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.668002 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.675225 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.676022 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.677156 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.677839 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.681703 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-299nl"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.682306 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.682987 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.683223 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.684891 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.690486 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.691036 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.697320 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.697325 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.697375 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.700656 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.700695 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.700860 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.701108 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.701552 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.701642 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 29 22:28:55 crc 
kubenswrapper[4922]: I0929 22:28:55.701709 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.702454 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.702789 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.702989 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703581 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703665 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703684 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703813 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703863 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703864 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703933 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.703979 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.704569 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.708730 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.708853 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.708961 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.709002 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.709009 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.709103 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.709425 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.709459 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.709723 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.710732 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.710874 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.711040 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.711051 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jvcm2"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.711210 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.711468 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.711814 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.711902 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.712456 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.720430 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.721145 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vm59q"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.740700 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.740897 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.740971 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.741034 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.741101 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.741192 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.741259 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.741324 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.741431 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.742740 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-szc8j"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.743270 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.743631 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.743899 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.745208 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.745273 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.745352 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.745588 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.745721 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.747864 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-serving-cert\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.747906 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-images\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.747941 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.747987 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdbx7\" (UniqueName: \"kubernetes.io/projected/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-kube-api-access-rdbx7\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.748013 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.748032 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjvdr\" (UniqueName: \"kubernetes.io/projected/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-kube-api-access-rjvdr\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.748055 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-config\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.748073 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-config\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.748097 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-client-ca\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.748722 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-fq7mw"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.749087 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-p6rml"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.749103 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.749182 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.749306 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.749319 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.749558 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.752053 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.752619 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.752848 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-p6rml" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.752938 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.753825 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754044 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754295 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754422 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754538 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754634 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754658 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754726 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754849 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.754927 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.755037 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.755121 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.755309 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.755762 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.755775 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.755858 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.755938 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.756120 4922 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rwhm5"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.756379 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.756678 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.756862 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.756987 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.758008 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.758989 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.759017 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.759126 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.759382 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.759736 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-2v5hs"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.760037 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.760082 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.762546 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.763636 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.767752 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.767760 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.767878 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.787560 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.788328 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.788650 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.789239 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.798606 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.799015 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.799122 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.800267 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.801152 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.801205 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.801607 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.801842 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.803438 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.803611 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.807728 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.808311 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.809736 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.810464 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.810669 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.811281 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.811563 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-sr875"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.811992 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.812957 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.813325 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.813406 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.813345 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.814262 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.814286 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.816303 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.816771 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.816892 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.817562 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.819466 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.820093 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.820907 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.820966 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.822817 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lzx9z"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.823738 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vqqxw"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.824629 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g9qdh"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.824993 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.828421 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h55bj"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.828444 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.828720 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.828766 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.831074 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.831655 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q8cqh"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.832262 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xb8f7"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.832308 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.832270 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.832708 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.833707 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.838140 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s985q"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.839152 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.840121 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-szc8j"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.841105 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-6mvkp"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.841757 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.842186 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jvcm2"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.843292 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.844376 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.845381 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-p6rml"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.846729 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.847776 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-7sffz"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.850096 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.850188 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-oauth-serving-cert\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 
22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.850291 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b895eae8-d790-41fe-a942-6e0c3d478c2d-node-pullsecrets\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.850329 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaa66aca-b3cc-4908-a4c5-020719c25b94-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.850490 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.851853 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eecd9ce0-6b17-4527-aaad-93e50307ec8f-config\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.851923 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.851972 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-audit\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852016 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-service-ca\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852041 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852068 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vjps\" (UniqueName: \"kubernetes.io/projected/00301cb8-afaf-4f7d-b6c2-483a9203c794-kube-api-access-8vjps\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852090 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8277139a-08f5-442b-a0f5-c2d173f1b427-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852118 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-ca\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852141 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-serving-cert\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852162 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b895eae8-d790-41fe-a942-6e0c3d478c2d-audit-dir\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852188 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/243f1207-b91f-4862-8c96-311f14da5e3b-machine-approver-tls\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852215 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-config\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852241 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-image-import-ca\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852265 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/00301cb8-afaf-4f7d-b6c2-483a9203c794-serving-cert\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852290 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-audit-policies\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852315 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852336 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-client\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852361 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073755aa-250f-4eef-bbd8-434dbe15e772-serving-cert\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852425 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz4vd\" (UniqueName: \"kubernetes.io/projected/c30379d2-4644-450d-bc35-d6a4c857d840-kube-api-access-pz4vd\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852526 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852555 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-encryption-config\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852580 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852700 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852601 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-etcd-client\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852804 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-encryption-config\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852827 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-config\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.852991 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853040 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-service-ca-bundle\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853067 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-service-ca-bundle\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853097 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f7lz\" (UniqueName: \"kubernetes.io/projected/a30ab676-362b-42ca-9eb0-8d42e4196078-kube-api-access-2f7lz\") pod \"downloads-7954f5f757-p6rml\" (UID: \"a30ab676-362b-42ca-9eb0-8d42e4196078\") " pod="openshift-console/downloads-7954f5f757-p6rml" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853118 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9267deb7-bbba-4e8a-923b-7be1559f83ce-serving-cert\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853142 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d8hn\" (UniqueName: \"kubernetes.io/projected/0f80dc4b-9880-42b5-97ae-8475d9087763-kube-api-access-9d8hn\") pod \"dns-operator-744455d44c-szc8j\" (UID: \"0f80dc4b-9880-42b5-97ae-8475d9087763\") " pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853164 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-config\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853187 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-images\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853212 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8277139a-08f5-442b-a0f5-c2d173f1b427-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc 
kubenswrapper[4922]: I0929 22:28:55.853231 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eecd9ce0-6b17-4527-aaad-93e50307ec8f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853253 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaa66aca-b3cc-4908-a4c5-020719c25b94-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853280 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-default-certificate\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853304 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-oauth-config\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853325 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87141534-77ec-47c4-91e2-ac69b63b5e97-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853346 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-etcd-serving-ca\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853471 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eecd9ce0-6b17-4527-aaad-93e50307ec8f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853556 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65kmr\" (UniqueName: \"kubernetes.io/projected/8277139a-08f5-442b-a0f5-c2d173f1b427-kube-api-access-65kmr\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853587 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fd489c-5c74-4b35-835e-f183dc4986ae-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853611 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mfps\" (UniqueName: \"kubernetes.io/projected/e73942c9-01ae-46bc-9fa2-d8c64727cadf-kube-api-access-2mfps\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853635 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-service-ca\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853667 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/243f1207-b91f-4862-8c96-311f14da5e3b-auth-proxy-config\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853665 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-config\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853837 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-config\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853863 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/073755aa-250f-4eef-bbd8-434dbe15e772-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853886 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/243f1207-b91f-4862-8c96-311f14da5e3b-config\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 
22:28:55.853910 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-config\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853926 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-trusted-ca-bundle\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.853949 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87141534-77ec-47c4-91e2-ac69b63b5e97-config\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854015 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdbx7\" (UniqueName: \"kubernetes.io/projected/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-kube-api-access-rdbx7\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854056 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c30379d2-4644-450d-bc35-d6a4c857d840-serving-cert\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854098 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854139 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjvdr\" (UniqueName: \"kubernetes.io/projected/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-kube-api-access-rjvdr\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854163 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6h88\" (UniqueName: \"kubernetes.io/projected/243f1207-b91f-4862-8c96-311f14da5e3b-kube-api-access-l6h88\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854166 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"images\" (UniqueName: \"kubernetes.io/configmap/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-images\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854181 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854206 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854405 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-serving-cert\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854446 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-dir\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854469 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-client-ca\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854491 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4tp8\" (UniqueName: \"kubernetes.io/projected/eaa66aca-b3cc-4908-a4c5-020719c25b94-kube-api-access-q4tp8\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854510 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbpnp\" (UniqueName: \"kubernetes.io/projected/ddedd179-84f4-4532-9d1b-eed45990a6e2-kube-api-access-dbpnp\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854534 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-dj76d\" (UniqueName: \"kubernetes.io/projected/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-kube-api-access-dj76d\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854554 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854574 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854598 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j5qr\" (UniqueName: \"kubernetes.io/projected/d68157f0-f55b-45bf-8288-6d0bd26f84de-kube-api-access-4j5qr\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854618 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-config\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854640 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-metrics-certs\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854661 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzz5j\" (UniqueName: \"kubernetes.io/projected/b895eae8-d790-41fe-a942-6e0c3d478c2d-kube-api-access-wzz5j\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854683 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-serving-cert\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854698 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854719 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854740 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854764 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-config\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854780 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-trusted-ca\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854800 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e73942c9-01ae-46bc-9fa2-d8c64727cadf-audit-dir\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854825 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-client-ca\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854847 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl67b\" (UniqueName: \"kubernetes.io/projected/09fd489c-5c74-4b35-835e-f183dc4986ae-kube-api-access-dl67b\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854867 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/8277139a-08f5-442b-a0f5-c2d173f1b427-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854888 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2fgp\" (UniqueName: \"kubernetes.io/projected/9267deb7-bbba-4e8a-923b-7be1559f83ce-kube-api-access-w2fgp\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854907 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-policies\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854927 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7drh9\" (UniqueName: \"kubernetes.io/projected/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-kube-api-access-7drh9\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854958 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854977 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-config\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.854995 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-etcd-client\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855028 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-serving-cert\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855051 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-stats-auth\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855071 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09fd489c-5c74-4b35-835e-f183dc4986ae-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855111 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0f80dc4b-9880-42b5-97ae-8475d9087763-metrics-tls\") pod \"dns-operator-744455d44c-szc8j\" (UID: \"0f80dc4b-9880-42b5-97ae-8475d9087763\") " pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855309 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87141534-77ec-47c4-91e2-ac69b63b5e97-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855364 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2kw9\" (UniqueName: \"kubernetes.io/projected/073755aa-250f-4eef-bbd8-434dbe15e772-kube-api-access-v2kw9\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855408 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855496 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-serving-cert\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.855934 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-client-ca\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.856417 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-config\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: 
\"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.857121 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.858544 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.860761 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.861152 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.862029 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.863053 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.864099 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.865524 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-299nl"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.866255 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.867530 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.868425 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.869954 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-fq7mw"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.871255 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.872252 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7sqx6"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.873468 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-serving-cert\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.873481 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vm59q"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.873567 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.874592 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.876892 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.876916 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-sr875"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.877773 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rwhm5"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.878748 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8wfcd"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.879798 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.879810 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-tc6gr"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.880696 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.881179 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-tc6gr"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.882275 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8wfcd"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.883322 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xb8f7"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.884445 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g9qdh"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.885528 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.886625 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.887745 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6mvkp"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.888766 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q8cqh"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.889857 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.890878 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt"] Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.892105 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.912535 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.933118 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.954193 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956110 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8277139a-08f5-442b-a0f5-c2d173f1b427-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956138 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2fgp\" (UniqueName: \"kubernetes.io/projected/9267deb7-bbba-4e8a-923b-7be1559f83ce-kube-api-access-w2fgp\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956162 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-policies\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956179 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7drh9\" (UniqueName: \"kubernetes.io/projected/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-kube-api-access-7drh9\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956196 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956215 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-config\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956230 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-etcd-client\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956252 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2kw9\" (UniqueName: \"kubernetes.io/projected/073755aa-250f-4eef-bbd8-434dbe15e772-kube-api-access-v2kw9\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956268 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-stats-auth\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956284 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09fd489c-5c74-4b35-835e-f183dc4986ae-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956299 4922 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0f80dc4b-9880-42b5-97ae-8475d9087763-metrics-tls\") pod \"dns-operator-744455d44c-szc8j\" (UID: \"0f80dc4b-9880-42b5-97ae-8475d9087763\") " pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956316 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87141534-77ec-47c4-91e2-ac69b63b5e97-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956332 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-serving-cert\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956347 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b895eae8-d790-41fe-a942-6e0c3d478c2d-node-pullsecrets\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956364 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956381 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-oauth-serving-cert\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956411 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eecd9ce0-6b17-4527-aaad-93e50307ec8f-config\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956427 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaa66aca-b3cc-4908-a4c5-020719c25b94-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956443 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: 
\"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956468 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956483 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-audit\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956498 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956512 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-service-ca\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956525 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-ca\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956539 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vjps\" (UniqueName: \"kubernetes.io/projected/00301cb8-afaf-4f7d-b6c2-483a9203c794-kube-api-access-8vjps\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956557 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8277139a-08f5-442b-a0f5-c2d173f1b427-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956572 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/243f1207-b91f-4862-8c96-311f14da5e3b-machine-approver-tls\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956585 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-serving-cert\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956601 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b895eae8-d790-41fe-a942-6e0c3d478c2d-audit-dir\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956615 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-image-import-ca\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956629 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00301cb8-afaf-4f7d-b6c2-483a9203c794-serving-cert\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956646 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-audit-policies\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956662 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956678 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-client\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956691 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073755aa-250f-4eef-bbd8-434dbe15e772-serving-cert\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956707 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz4vd\" (UniqueName: \"kubernetes.io/projected/c30379d2-4644-450d-bc35-d6a4c857d840-kube-api-access-pz4vd\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: 
\"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956722 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956736 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-encryption-config\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956752 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-config\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956766 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956780 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-etcd-client\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956796 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-encryption-config\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956811 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f7lz\" (UniqueName: \"kubernetes.io/projected/a30ab676-362b-42ca-9eb0-8d42e4196078-kube-api-access-2f7lz\") pod \"downloads-7954f5f757-p6rml\" (UID: \"a30ab676-362b-42ca-9eb0-8d42e4196078\") " pod="openshift-console/downloads-7954f5f757-p6rml" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956826 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-service-ca-bundle\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956841 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-service-ca-bundle\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956856 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-config\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956870 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9267deb7-bbba-4e8a-923b-7be1559f83ce-serving-cert\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956885 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d8hn\" (UniqueName: \"kubernetes.io/projected/0f80dc4b-9880-42b5-97ae-8475d9087763-kube-api-access-9d8hn\") pod \"dns-operator-744455d44c-szc8j\" (UID: \"0f80dc4b-9880-42b5-97ae-8475d9087763\") " pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956899 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8277139a-08f5-442b-a0f5-c2d173f1b427-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956916 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eecd9ce0-6b17-4527-aaad-93e50307ec8f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956942 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaa66aca-b3cc-4908-a4c5-020719c25b94-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956959 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-etcd-serving-ca\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956976 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-default-certificate\") pod \"router-default-5444994796-2v5hs\" (UID: 
\"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.956991 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-oauth-config\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957006 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87141534-77ec-47c4-91e2-ac69b63b5e97-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957022 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65kmr\" (UniqueName: \"kubernetes.io/projected/8277139a-08f5-442b-a0f5-c2d173f1b427-kube-api-access-65kmr\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957036 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eecd9ce0-6b17-4527-aaad-93e50307ec8f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957059 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fd489c-5c74-4b35-835e-f183dc4986ae-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957073 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/243f1207-b91f-4862-8c96-311f14da5e3b-auth-proxy-config\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957089 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mfps\" (UniqueName: \"kubernetes.io/projected/e73942c9-01ae-46bc-9fa2-d8c64727cadf-kube-api-access-2mfps\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957103 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-service-ca\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 
22:28:55.957118 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-config\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957133 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/073755aa-250f-4eef-bbd8-434dbe15e772-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957149 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/243f1207-b91f-4862-8c96-311f14da5e3b-config\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957163 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-config\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957178 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-trusted-ca-bundle\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957192 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87141534-77ec-47c4-91e2-ac69b63b5e97-config\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957211 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c30379d2-4644-450d-bc35-d6a4c857d840-serving-cert\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957235 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6h88\" (UniqueName: \"kubernetes.io/projected/243f1207-b91f-4862-8c96-311f14da5e3b-kube-api-access-l6h88\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957256 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957287 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbpnp\" (UniqueName: \"kubernetes.io/projected/ddedd179-84f4-4532-9d1b-eed45990a6e2-kube-api-access-dbpnp\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957301 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-serving-cert\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957315 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-dir\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957330 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-client-ca\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957345 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4tp8\" (UniqueName: \"kubernetes.io/projected/eaa66aca-b3cc-4908-a4c5-020719c25b94-kube-api-access-q4tp8\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957362 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj76d\" (UniqueName: \"kubernetes.io/projected/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-kube-api-access-dj76d\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957378 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-299nl\" (UID: 
\"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957407 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957424 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j5qr\" (UniqueName: \"kubernetes.io/projected/d68157f0-f55b-45bf-8288-6d0bd26f84de-kube-api-access-4j5qr\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957440 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-metrics-certs\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957455 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzz5j\" (UniqueName: \"kubernetes.io/projected/b895eae8-d790-41fe-a942-6e0c3d478c2d-kube-api-access-wzz5j\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957479 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-serving-cert\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957496 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957514 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957529 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 
22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957545 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-config\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957560 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-trusted-ca\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957574 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e73942c9-01ae-46bc-9fa2-d8c64727cadf-audit-dir\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.957590 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl67b\" (UniqueName: \"kubernetes.io/projected/09fd489c-5c74-4b35-835e-f183dc4986ae-kube-api-access-dl67b\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.958729 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87141534-77ec-47c4-91e2-ac69b63b5e97-config\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.959170 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-config\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.960216 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8277139a-08f5-442b-a0f5-c2d173f1b427-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.962582 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-service-ca-bundle\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.963278 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.963327 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-policies\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.963652 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-etcd-serving-ca\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.964383 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-service-ca\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.964584 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/243f1207-b91f-4862-8c96-311f14da5e3b-config\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.964922 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-config\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.965338 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-config\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.965477 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/243f1207-b91f-4862-8c96-311f14da5e3b-auth-proxy-config\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.965714 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/073755aa-250f-4eef-bbd8-434dbe15e772-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.965760 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-trusted-ca-bundle\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.966458 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-config\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.966503 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-image-import-ca\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.967712 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9267deb7-bbba-4e8a-923b-7be1559f83ce-serving-cert\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.967723 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-serving-cert\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.968136 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.968715 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b895eae8-d790-41fe-a942-6e0c3d478c2d-audit-dir\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.968906 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.969278 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-audit-policies\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.969634 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.970148 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09fd489c-5c74-4b35-835e-f183dc4986ae-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.970313 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.970383 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b895eae8-d790-41fe-a942-6e0c3d478c2d-node-pullsecrets\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.970809 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/00301cb8-afaf-4f7d-b6c2-483a9203c794-config\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.970867 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8277139a-08f5-442b-a0f5-c2d173f1b427-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.971148 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/243f1207-b91f-4862-8c96-311f14da5e3b-machine-approver-tls\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.971434 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-config\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.971640 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-etcd-client\") pod 
\"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.971813 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-trusted-ca\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.972013 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-dir\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.972138 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-oauth-serving-cert\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.972178 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-client-ca\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.972455 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e73942c9-01ae-46bc-9fa2-d8c64727cadf-audit-dir\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.972686 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-service-ca\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.972866 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-audit\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.972905 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e73942c9-01ae-46bc-9fa2-d8c64727cadf-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.973009 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b895eae8-d790-41fe-a942-6e0c3d478c2d-trusted-ca-bundle\") pod 
\"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.973308 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.973656 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-ca\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.973909 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87141534-77ec-47c4-91e2-ac69b63b5e97-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.974537 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c30379d2-4644-450d-bc35-d6a4c857d840-serving-cert\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.974970 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-serving-cert\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.975468 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.975477 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/073755aa-250f-4eef-bbd8-434dbe15e772-serving-cert\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.975790 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.977809 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-serving-cert\") pod \"apiserver-76f77b778f-lzx9z\" 
(UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.978369 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/e73942c9-01ae-46bc-9fa2-d8c64727cadf-encryption-config\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.978426 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.978518 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/9267deb7-bbba-4e8a-923b-7be1559f83ce-etcd-client\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.978533 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-encryption-config\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.978916 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.979334 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.979667 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-oauth-config\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.979699 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/00301cb8-afaf-4f7d-b6c2-483a9203c794-serving-cert\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.979671 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.979960 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b895eae8-d790-41fe-a942-6e0c3d478c2d-etcd-client\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.980111 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-serving-cert\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.980123 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09fd489c-5c74-4b35-835e-f183dc4986ae-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.980311 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:55 crc kubenswrapper[4922]: I0929 22:28:55.981546 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.004060 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.004282 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0f80dc4b-9880-42b5-97ae-8475d9087763-metrics-tls\") pod \"dns-operator-744455d44c-szc8j\" (UID: \"0f80dc4b-9880-42b5-97ae-8475d9087763\") " pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.013158 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.017331 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eecd9ce0-6b17-4527-aaad-93e50307ec8f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.029746 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eecd9ce0-6b17-4527-aaad-93e50307ec8f-config\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.032285 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.053482 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.073153 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.092823 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.098280 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-metrics-certs\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.112061 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.133696 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.154115 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.160168 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-default-certificate\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.173517 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.188548 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaa66aca-b3cc-4908-a4c5-020719c25b94-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.193163 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.204791 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-stats-auth\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.214599 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.234251 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.239124 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-service-ca-bundle\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.253342 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.263867 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaa66aca-b3cc-4908-a4c5-020719c25b94-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.273474 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.293820 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.352913 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.374029 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.403380 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.412851 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.432966 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.453475 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.473047 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 29 22:28:56 crc 
kubenswrapper[4922]: I0929 22:28:56.493989 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.513209 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.534069 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.553522 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.572805 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.593481 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.613310 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.633432 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.652914 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.672624 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.692570 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.713595 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.733519 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.753081 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.773362 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.793437 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.813718 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.831088 4922 request.go:700] Waited for 1.018873075s due to client-side throttling, not priority and fairness, request: 
GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmco-proxy-tls&limit=500&resourceVersion=0 Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.833793 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.852962 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.874083 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.893298 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.913000 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.933640 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.952944 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.973289 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Sep 29 22:28:56 crc kubenswrapper[4922]: I0929 22:28:56.993508 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.013958 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.036456 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.054145 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.073433 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.093336 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.113279 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.133877 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.153118 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.172946 4922 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.193149 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.213625 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.234066 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.253383 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.273033 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.294880 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.313186 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.333471 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.353065 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.372957 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.393380 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.424763 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.433114 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.453347 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.473965 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.492933 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.514351 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.533499 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.553854 4922 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.599913 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdbx7\" (UniqueName: \"kubernetes.io/projected/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-kube-api-access-rdbx7\") pod \"controller-manager-879f6c89f-7sqx6\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.612946 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.624805 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjvdr\" (UniqueName: \"kubernetes.io/projected/a5b06da6-a6a0-4367-b89d-619e1dd50c4d-kube-api-access-rjvdr\") pod \"machine-api-operator-5694c8668f-vqqxw\" (UID: \"a5b06da6-a6a0-4367-b89d-619e1dd50c4d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.633990 4922 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.653666 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.674017 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.694602 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.714046 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.734166 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.779871 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl67b\" (UniqueName: \"kubernetes.io/projected/09fd489c-5c74-4b35-835e-f183dc4986ae-kube-api-access-dl67b\") pod \"openshift-apiserver-operator-796bbdcf4f-4mgqx\" (UID: \"09fd489c-5c74-4b35-835e-f183dc4986ae\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.790646 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.799428 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2fgp\" (UniqueName: \"kubernetes.io/projected/9267deb7-bbba-4e8a-923b-7be1559f83ce-kube-api-access-w2fgp\") pod \"etcd-operator-b45778765-jvcm2\" (UID: \"9267deb7-bbba-4e8a-923b-7be1559f83ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.820050 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d8hn\" (UniqueName: \"kubernetes.io/projected/0f80dc4b-9880-42b5-97ae-8475d9087763-kube-api-access-9d8hn\") pod \"dns-operator-744455d44c-szc8j\" (UID: \"0f80dc4b-9880-42b5-97ae-8475d9087763\") " pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.831765 4922 request.go:700] Waited for 1.868287971s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/serviceaccounts/router/token Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.839571 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8277139a-08f5-442b-a0f5-c2d173f1b427-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.843505 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.869628 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7drh9\" (UniqueName: \"kubernetes.io/projected/2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e-kube-api-access-7drh9\") pod \"router-default-5444994796-2v5hs\" (UID: \"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e\") " pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.888870 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j5qr\" (UniqueName: \"kubernetes.io/projected/d68157f0-f55b-45bf-8288-6d0bd26f84de-kube-api-access-4j5qr\") pod \"oauth-openshift-558db77b4-299nl\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.899799 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzz5j\" (UniqueName: \"kubernetes.io/projected/b895eae8-d790-41fe-a942-6e0c3d478c2d-kube-api-access-wzz5j\") pod \"apiserver-76f77b778f-lzx9z\" (UID: \"b895eae8-d790-41fe-a942-6e0c3d478c2d\") " pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.913796 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.924294 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mfps\" (UniqueName: \"kubernetes.io/projected/e73942c9-01ae-46bc-9fa2-d8c64727cadf-kube-api-access-2mfps\") pod \"apiserver-7bbb656c7d-mvvtp\" (UID: \"e73942c9-01ae-46bc-9fa2-d8c64727cadf\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.936636 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.941747 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vjps\" (UniqueName: \"kubernetes.io/projected/00301cb8-afaf-4f7d-b6c2-483a9203c794-kube-api-access-8vjps\") pod \"authentication-operator-69f744f599-h55bj\" (UID: \"00301cb8-afaf-4f7d-b6c2-483a9203c794\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.944438 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.949750 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.964059 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65kmr\" (UniqueName: \"kubernetes.io/projected/8277139a-08f5-442b-a0f5-c2d173f1b427-kube-api-access-65kmr\") pod \"cluster-image-registry-operator-dc59b4c8b-5mtpj\" (UID: \"8277139a-08f5-442b-a0f5-c2d173f1b427\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.973375 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/87141534-77ec-47c4-91e2-ac69b63b5e97-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-twpc7\" (UID: \"87141534-77ec-47c4-91e2-ac69b63b5e97\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.980908 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.989073 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6h88\" (UniqueName: \"kubernetes.io/projected/243f1207-b91f-4862-8c96-311f14da5e3b-kube-api-access-l6h88\") pod \"machine-approver-56656f9798-t49hc\" (UID: \"243f1207-b91f-4862-8c96-311f14da5e3b\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:57 crc kubenswrapper[4922]: I0929 22:28:57.996301 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.008654 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f7lz\" (UniqueName: \"kubernetes.io/projected/a30ab676-362b-42ca-9eb0-8d42e4196078-kube-api-access-2f7lz\") pod \"downloads-7954f5f757-p6rml\" (UID: \"a30ab676-362b-42ca-9eb0-8d42e4196078\") " pod="openshift-console/downloads-7954f5f757-p6rml" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.013418 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.032883 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.033690 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz4vd\" (UniqueName: \"kubernetes.io/projected/c30379d2-4644-450d-bc35-d6a4c857d840-kube-api-access-pz4vd\") pod \"route-controller-manager-6576b87f9c-gckct\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.052989 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/eecd9ce0-6b17-4527-aaad-93e50307ec8f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-dssdr\" (UID: \"eecd9ce0-6b17-4527-aaad-93e50307ec8f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.075406 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbpnp\" (UniqueName: \"kubernetes.io/projected/ddedd179-84f4-4532-9d1b-eed45990a6e2-kube-api-access-dbpnp\") pod \"console-f9d7485db-fq7mw\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.098988 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2kw9\" (UniqueName: \"kubernetes.io/projected/073755aa-250f-4eef-bbd8-434dbe15e772-kube-api-access-v2kw9\") pod \"openshift-config-operator-7777fb866f-vm59q\" (UID: \"073755aa-250f-4eef-bbd8-434dbe15e772\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.107206 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4tp8\" (UniqueName: \"kubernetes.io/projected/eaa66aca-b3cc-4908-a4c5-020719c25b94-kube-api-access-q4tp8\") pod \"openshift-controller-manager-operator-756b6f6bc6-jkrwh\" (UID: \"eaa66aca-b3cc-4908-a4c5-020719c25b94\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.140790 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj76d\" (UniqueName: \"kubernetes.io/projected/a7042784-d2d4-45a1-b4ea-b27ccd9791c8-kube-api-access-dj76d\") pod \"console-operator-58897d9998-s985q\" (UID: \"a7042784-d2d4-45a1-b4ea-b27ccd9791c8\") " pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 
29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.146324 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.162097 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vqqxw"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.164691 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.175560 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7sqx6"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.184379 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2v5hs" event={"ID":"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e","Type":"ContainerStarted","Data":"1e08fa93c19eea7b96c02fc3f6c66729433088f43c8564e4529eb713755434c4"} Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.188715 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196226 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-bound-sa-token\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196278 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196301 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-certificates\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196347 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-trusted-ca\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196509 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/104a5cfb-7f2a-48d1-be00-10f698d0b552-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 
22:28:58.196601 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:58.696586607 +0000 UTC m=+143.006875410 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196718 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp2n5\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-kube-api-access-dp2n5\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196777 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/104a5cfb-7f2a-48d1-be00-10f698d0b552-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.196799 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-tls\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: W0929 22:28:58.216733 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5b06da6_a6a0_4367_b89d_619e1dd50c4d.slice/crio-5a023b26125b48059b4a66c74cc95a616312afa10463e755edc98b128bb349d7 WatchSource:0}: Error finding container 5a023b26125b48059b4a66c74cc95a616312afa10463e755edc98b128bb349d7: Status 404 returned error can't find the container with id 5a023b26125b48059b4a66c74cc95a616312afa10463e755edc98b128bb349d7 Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.227113 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.229840 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:28:58 crc kubenswrapper[4922]: W0929 22:28:58.243551 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc68fe1bd_70ef_4d9d_8163_7eb2bd8e9abc.slice/crio-7984240d443793470514431959e6bd6c3b9305fa0af78996ddd3feb6bb57fafb WatchSource:0}: Error finding container 7984240d443793470514431959e6bd6c3b9305fa0af78996ddd3feb6bb57fafb: Status 404 returned error can't find the container with id 7984240d443793470514431959e6bd6c3b9305fa0af78996ddd3feb6bb57fafb Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.284445 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299064 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.299269 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:58.799242435 +0000 UTC m=+143.109531248 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299411 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-tls\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299443 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dacb34ee-2394-415c-8e84-dcd07ac61cc8-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299492 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4jk2\" (UniqueName: \"kubernetes.io/projected/8b4f4aa2-1d81-4cd8-8355-f785c8443411-kube-api-access-m4jk2\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299518 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-profile-collector-cert\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299610 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-bound-sa-token\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299683 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thfxz\" (UniqueName: \"kubernetes.io/projected/edadc037-767e-469d-95d1-e620b2e87d5f-kube-api-access-thfxz\") pod \"migrator-59844c95c7-89dqj\" (UID: \"edadc037-767e-469d-95d1-e620b2e87d5f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299715 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.299810 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.300142 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:58.800127017 +0000 UTC m=+143.110415830 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300612 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc8kw\" (UniqueName: \"kubernetes.io/projected/37f865e7-1dae-4f10-8d6c-d021844ab7b8-kube-api-access-kc8kw\") pod \"cluster-samples-operator-665b6dd947-kx6zb\" (UID: \"37f865e7-1dae-4f10-8d6c-d021844ab7b8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300639 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b61cd9df-6474-43fc-952f-1d032f189678-signing-cabundle\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300661 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b4f4aa2-1d81-4cd8-8355-f785c8443411-config\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300704 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b457a47-b41f-403f-bdf5-28cd26520dff-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300723 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgsgq\" (UniqueName: \"kubernetes.io/projected/4b457a47-b41f-403f-bdf5-28cd26520dff-kube-api-access-jgsgq\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300768 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-certificates\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300795 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-trusted-ca\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300850 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6916d599-759f-47bc-a87f-dc01ce91aed0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q8cqh\" (UID: \"6916d599-759f-47bc-a87f-dc01ce91aed0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300874 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/620d277e-b41f-4876-9b64-577a5a4f149e-cert\") pod \"ingress-canary-tc6gr\" (UID: \"620d277e-b41f-4876-9b64-577a5a4f149e\") " pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300909 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4dba6962-b548-45c2-9197-4071b577c2f5-webhook-cert\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300950 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cwm2\" (UniqueName: \"kubernetes.io/projected/6916d599-759f-47bc-a87f-dc01ce91aed0-kube-api-access-8cwm2\") pod \"multus-admission-controller-857f4d67dd-q8cqh\" (UID: \"6916d599-759f-47bc-a87f-dc01ce91aed0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300967 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcg8c\" (UniqueName: \"kubernetes.io/projected/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-kube-api-access-tcg8c\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.300986 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn2jg\" (UniqueName: \"kubernetes.io/projected/257397de-bb75-4f16-93d0-d516ea1938ff-kube-api-access-cn2jg\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301020 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/69f43bae-138c-406c-83de-8145bd743c82-metrics-tls\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301053 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsghg\" (UniqueName: \"kubernetes.io/projected/e5866976-5fbc-455a-bfe9-f58b6e8b58ae-kube-api-access-fsghg\") pod \"package-server-manager-789f6589d5-vr4md\" (UID: \"e5866976-5fbc-455a-bfe9-f58b6e8b58ae\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301068 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-proxy-tls\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301140 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/deaead36-f205-4377-938d-53c8f82d5c03-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301184 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-csi-data-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301253 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-plugins-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301277 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmwkx\" (UniqueName: \"kubernetes.io/projected/3c5d1982-8fe9-4776-83a4-2bd856394360-kube-api-access-lmwkx\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301292 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4499e978-6c8b-4d19-98da-9067d3d01ad8-config-volume\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301451 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8qln\" (UniqueName: \"kubernetes.io/projected/3c05c4fe-136b-402a-a35a-f91147e07150-kube-api-access-m8qln\") pod \"control-plane-machine-set-operator-78cbb6b69f-ts4x9\" (UID: \"3c05c4fe-136b-402a-a35a-f91147e07150\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301531 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3c05c4fe-136b-402a-a35a-f91147e07150-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ts4x9\" (UID: 
\"3c05c4fe-136b-402a-a35a-f91147e07150\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301552 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301649 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x54jd\" (UniqueName: \"kubernetes.io/projected/dacb34ee-2394-415c-8e84-dcd07ac61cc8-kube-api-access-x54jd\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301722 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dacb34ee-2394-415c-8e84-dcd07ac61cc8-srv-cert\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301967 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp2n5\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-kube-api-access-dp2n5\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.301994 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j89xc\" (UniqueName: \"kubernetes.io/projected/620d277e-b41f-4876-9b64-577a5a4f149e-kube-api-access-j89xc\") pod \"ingress-canary-tc6gr\" (UID: \"620d277e-b41f-4876-9b64-577a5a4f149e\") " pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302017 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b61cd9df-6474-43fc-952f-1d032f189678-signing-key\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302036 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-registration-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302056 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-mountpoint-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 
22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302075 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4dba6962-b548-45c2-9197-4071b577c2f5-tmpfs\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302113 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/deaead36-f205-4377-938d-53c8f82d5c03-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302147 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck8kg\" (UniqueName: \"kubernetes.io/projected/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-kube-api-access-ck8kg\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302188 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/104a5cfb-7f2a-48d1-be00-10f698d0b552-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302206 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-socket-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302226 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302264 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/257397de-bb75-4f16-93d0-d516ea1938ff-certs\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302284 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/69f43bae-138c-406c-83de-8145bd743c82-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302533 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-images\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302597 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-auth-proxy-config\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302624 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p9w2\" (UniqueName: \"kubernetes.io/projected/4dba6962-b548-45c2-9197-4071b577c2f5-kube-api-access-2p9w2\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302641 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/69f43bae-138c-406c-83de-8145bd743c82-trusted-ca\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302660 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/deaead36-f205-4377-938d-53c8f82d5c03-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302676 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b457a47-b41f-403f-bdf5-28cd26520dff-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302694 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5866976-5fbc-455a-bfe9-f58b6e8b58ae-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vr4md\" (UID: \"e5866976-5fbc-455a-bfe9-f58b6e8b58ae\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302742 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lds4h\" (UniqueName: \"kubernetes.io/projected/d51b12db-7451-4cf2-bf6e-a156e2654342-kube-api-access-lds4h\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302766 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c5d1982-8fe9-4776-83a4-2bd856394360-config-volume\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302782 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b4f4aa2-1d81-4cd8-8355-f785c8443411-serving-cert\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302796 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-srv-cert\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302812 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3c5d1982-8fe9-4776-83a4-2bd856394360-metrics-tls\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302828 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4499e978-6c8b-4d19-98da-9067d3d01ad8-secret-volume\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302875 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/104a5cfb-7f2a-48d1-be00-10f698d0b552-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302895 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/257397de-bb75-4f16-93d0-d516ea1938ff-node-bootstrap-token\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.302927 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9n62\" (UniqueName: \"kubernetes.io/projected/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-kube-api-access-c9n62\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303082 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4dba6962-b548-45c2-9197-4071b577c2f5-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303138 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303187 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kxc6\" (UniqueName: \"kubernetes.io/projected/b61cd9df-6474-43fc-952f-1d032f189678-kube-api-access-8kxc6\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303224 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-proxy-tls\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303239 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4j29\" (UniqueName: \"kubernetes.io/projected/4499e978-6c8b-4d19-98da-9067d3d01ad8-kube-api-access-x4j29\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303312 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj4jw\" (UniqueName: \"kubernetes.io/projected/8928157b-0f62-4952-96ef-9a0a8f543682-kube-api-access-mj4jw\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303323 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-trusted-ca\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303347 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-certificates\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303366 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/37f865e7-1dae-4f10-8d6c-d021844ab7b8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-kx6zb\" (UID: \"37f865e7-1dae-4f10-8d6c-d021844ab7b8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.303409 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4qb6\" (UniqueName: \"kubernetes.io/projected/69f43bae-138c-406c-83de-8145bd743c82-kube-api-access-f4qb6\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.305998 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/104a5cfb-7f2a-48d1-be00-10f698d0b552-installation-pull-secrets\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.306090 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-p6rml" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.306301 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/104a5cfb-7f2a-48d1-be00-10f698d0b552-ca-trust-extracted\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.310885 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-tls\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.320108 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.347243 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.354011 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-bound-sa-token\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.372252 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp2n5\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-kube-api-access-dp2n5\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405609 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405742 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/257397de-bb75-4f16-93d0-d516ea1938ff-node-bootstrap-token\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405763 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9n62\" (UniqueName: \"kubernetes.io/projected/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-kube-api-access-c9n62\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405781 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4dba6962-b548-45c2-9197-4071b577c2f5-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405825 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405854 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kxc6\" (UniqueName: \"kubernetes.io/projected/b61cd9df-6474-43fc-952f-1d032f189678-kube-api-access-8kxc6\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405872 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-proxy-tls\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.405887 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4j29\" (UniqueName: \"kubernetes.io/projected/4499e978-6c8b-4d19-98da-9067d3d01ad8-kube-api-access-x4j29\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407662 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj4jw\" (UniqueName: \"kubernetes.io/projected/8928157b-0f62-4952-96ef-9a0a8f543682-kube-api-access-mj4jw\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407687 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/37f865e7-1dae-4f10-8d6c-d021844ab7b8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-kx6zb\" (UID: \"37f865e7-1dae-4f10-8d6c-d021844ab7b8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407702 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4qb6\" (UniqueName: \"kubernetes.io/projected/69f43bae-138c-406c-83de-8145bd743c82-kube-api-access-f4qb6\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407728 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dacb34ee-2394-415c-8e84-dcd07ac61cc8-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407743 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4jk2\" (UniqueName: \"kubernetes.io/projected/8b4f4aa2-1d81-4cd8-8355-f785c8443411-kube-api-access-m4jk2\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407768 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-profile-collector-cert\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407784 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-thfxz\" (UniqueName: \"kubernetes.io/projected/edadc037-767e-469d-95d1-e620b2e87d5f-kube-api-access-thfxz\") pod \"migrator-59844c95c7-89dqj\" (UID: \"edadc037-767e-469d-95d1-e620b2e87d5f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407809 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgsgq\" (UniqueName: \"kubernetes.io/projected/4b457a47-b41f-403f-bdf5-28cd26520dff-kube-api-access-jgsgq\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407830 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc8kw\" (UniqueName: \"kubernetes.io/projected/37f865e7-1dae-4f10-8d6c-d021844ab7b8-kube-api-access-kc8kw\") pod \"cluster-samples-operator-665b6dd947-kx6zb\" (UID: \"37f865e7-1dae-4f10-8d6c-d021844ab7b8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407846 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b61cd9df-6474-43fc-952f-1d032f189678-signing-cabundle\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407860 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b4f4aa2-1d81-4cd8-8355-f785c8443411-config\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407875 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b457a47-b41f-403f-bdf5-28cd26520dff-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407893 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/620d277e-b41f-4876-9b64-577a5a4f149e-cert\") pod \"ingress-canary-tc6gr\" (UID: \"620d277e-b41f-4876-9b64-577a5a4f149e\") " pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407910 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6916d599-759f-47bc-a87f-dc01ce91aed0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q8cqh\" (UID: \"6916d599-759f-47bc-a87f-dc01ce91aed0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407925 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn2jg\" (UniqueName: 
\"kubernetes.io/projected/257397de-bb75-4f16-93d0-d516ea1938ff-kube-api-access-cn2jg\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407940 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4dba6962-b548-45c2-9197-4071b577c2f5-webhook-cert\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407954 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cwm2\" (UniqueName: \"kubernetes.io/projected/6916d599-759f-47bc-a87f-dc01ce91aed0-kube-api-access-8cwm2\") pod \"multus-admission-controller-857f4d67dd-q8cqh\" (UID: \"6916d599-759f-47bc-a87f-dc01ce91aed0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407971 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcg8c\" (UniqueName: \"kubernetes.io/projected/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-kube-api-access-tcg8c\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.407987 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/69f43bae-138c-406c-83de-8145bd743c82-metrics-tls\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408005 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsghg\" (UniqueName: \"kubernetes.io/projected/e5866976-5fbc-455a-bfe9-f58b6e8b58ae-kube-api-access-fsghg\") pod \"package-server-manager-789f6589d5-vr4md\" (UID: \"e5866976-5fbc-455a-bfe9-f58b6e8b58ae\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408018 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-proxy-tls\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408033 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/deaead36-f205-4377-938d-53c8f82d5c03-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408051 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-csi-data-dir\") pod 
\"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408068 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-plugins-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408084 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmwkx\" (UniqueName: \"kubernetes.io/projected/3c5d1982-8fe9-4776-83a4-2bd856394360-kube-api-access-lmwkx\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408102 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4499e978-6c8b-4d19-98da-9067d3d01ad8-config-volume\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408129 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8qln\" (UniqueName: \"kubernetes.io/projected/3c05c4fe-136b-402a-a35a-f91147e07150-kube-api-access-m8qln\") pod \"control-plane-machine-set-operator-78cbb6b69f-ts4x9\" (UID: \"3c05c4fe-136b-402a-a35a-f91147e07150\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408154 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3c05c4fe-136b-402a-a35a-f91147e07150-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ts4x9\" (UID: \"3c05c4fe-136b-402a-a35a-f91147e07150\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408172 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x54jd\" (UniqueName: \"kubernetes.io/projected/dacb34ee-2394-415c-8e84-dcd07ac61cc8-kube-api-access-x54jd\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408191 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408206 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dacb34ee-2394-415c-8e84-dcd07ac61cc8-srv-cert\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408222 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j89xc\" (UniqueName: \"kubernetes.io/projected/620d277e-b41f-4876-9b64-577a5a4f149e-kube-api-access-j89xc\") pod \"ingress-canary-tc6gr\" (UID: \"620d277e-b41f-4876-9b64-577a5a4f149e\") " pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408237 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b61cd9df-6474-43fc-952f-1d032f189678-signing-key\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408255 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-registration-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/deaead36-f205-4377-938d-53c8f82d5c03-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408284 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-mountpoint-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408298 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4dba6962-b548-45c2-9197-4071b577c2f5-tmpfs\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408312 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck8kg\" (UniqueName: \"kubernetes.io/projected/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-kube-api-access-ck8kg\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408334 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/257397de-bb75-4f16-93d0-d516ea1938ff-certs\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408349 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-socket-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408362 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408381 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/69f43bae-138c-406c-83de-8145bd743c82-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408420 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-images\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408436 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-auth-proxy-config\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408451 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/69f43bae-138c-406c-83de-8145bd743c82-trusted-ca\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408466 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p9w2\" (UniqueName: \"kubernetes.io/projected/4dba6962-b548-45c2-9197-4071b577c2f5-kube-api-access-2p9w2\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408480 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/deaead36-f205-4377-938d-53c8f82d5c03-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408496 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4b457a47-b41f-403f-bdf5-28cd26520dff-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: 
\"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408511 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5866976-5fbc-455a-bfe9-f58b6e8b58ae-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vr4md\" (UID: \"e5866976-5fbc-455a-bfe9-f58b6e8b58ae\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408531 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lds4h\" (UniqueName: \"kubernetes.io/projected/d51b12db-7451-4cf2-bf6e-a156e2654342-kube-api-access-lds4h\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408546 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-srv-cert\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408561 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c5d1982-8fe9-4776-83a4-2bd856394360-config-volume\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.408725 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:58.908696355 +0000 UTC m=+143.218985168 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.409941 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b457a47-b41f-403f-bdf5-28cd26520dff-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.412975 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b4f4aa2-1d81-4cd8-8355-f785c8443411-config\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.413173 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/69f43bae-138c-406c-83de-8145bd743c82-metrics-tls\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.413333 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-csi-data-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.413716 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-plugins-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.414103 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b61cd9df-6474-43fc-952f-1d032f189678-signing-cabundle\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.415108 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4499e978-6c8b-4d19-98da-9067d3d01ad8-config-volume\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.415122 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-images\") pod \"machine-config-operator-74547568cd-sr875\" (UID: 
\"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.415496 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4dba6962-b548-45c2-9197-4071b577c2f5-apiservice-cert\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.408576 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b4f4aa2-1d81-4cd8-8355-f785c8443411-serving-cert\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.415933 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3c5d1982-8fe9-4776-83a4-2bd856394360-metrics-tls\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.416014 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4499e978-6c8b-4d19-98da-9067d3d01ad8-secret-volume\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.416107 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/257397de-bb75-4f16-93d0-d516ea1938ff-certs\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.416125 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.416699 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.417314 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-auth-proxy-config\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.417379 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-socket-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.417828 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3c5d1982-8fe9-4776-83a4-2bd856394360-config-volume\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.418475 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6916d599-759f-47bc-a87f-dc01ce91aed0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q8cqh\" (UID: \"6916d599-759f-47bc-a87f-dc01ce91aed0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.418537 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/69f43bae-138c-406c-83de-8145bd743c82-trusted-ca\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.418575 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-profile-collector-cert\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.419338 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/deaead36-f205-4377-938d-53c8f82d5c03-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.419707 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-registration-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.420673 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-proxy-tls\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.420908 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/257397de-bb75-4f16-93d0-d516ea1938ff-node-bootstrap-token\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 
22:28:58.421083 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-proxy-tls\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.421201 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3c05c4fe-136b-402a-a35a-f91147e07150-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-ts4x9\" (UID: \"3c05c4fe-136b-402a-a35a-f91147e07150\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.421263 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b61cd9df-6474-43fc-952f-1d032f189678-signing-key\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.422879 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-srv-cert\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.424852 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/37f865e7-1dae-4f10-8d6c-d021844ab7b8-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-kx6zb\" (UID: \"37f865e7-1dae-4f10-8d6c-d021844ab7b8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.425276 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/dacb34ee-2394-415c-8e84-dcd07ac61cc8-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.425956 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.426017 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8928157b-0f62-4952-96ef-9a0a8f543682-mountpoint-dir\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.426733 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/4b457a47-b41f-403f-bdf5-28cd26520dff-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.427332 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3c5d1982-8fe9-4776-83a4-2bd856394360-metrics-tls\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.428828 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/620d277e-b41f-4876-9b64-577a5a4f149e-cert\") pod \"ingress-canary-tc6gr\" (UID: \"620d277e-b41f-4876-9b64-577a5a4f149e\") " pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.430901 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4dba6962-b548-45c2-9197-4071b577c2f5-webhook-cert\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.430919 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4499e978-6c8b-4d19-98da-9067d3d01ad8-secret-volume\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.431033 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e5866976-5fbc-455a-bfe9-f58b6e8b58ae-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vr4md\" (UID: \"e5866976-5fbc-455a-bfe9-f58b6e8b58ae\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.431171 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b4f4aa2-1d81-4cd8-8355-f785c8443411-serving-cert\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.431325 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/dacb34ee-2394-415c-8e84-dcd07ac61cc8-srv-cert\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.434181 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/deaead36-f205-4377-938d-53c8f82d5c03-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 
crc kubenswrapper[4922]: I0929 22:28:58.436275 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kxc6\" (UniqueName: \"kubernetes.io/projected/b61cd9df-6474-43fc-952f-1d032f189678-kube-api-access-8kxc6\") pod \"service-ca-9c57cc56f-g9qdh\" (UID: \"b61cd9df-6474-43fc-952f-1d032f189678\") " pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.444316 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4dba6962-b548-45c2-9197-4071b577c2f5-tmpfs\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.446481 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9n62\" (UniqueName: \"kubernetes.io/projected/c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4-kube-api-access-c9n62\") pod \"catalog-operator-68c6474976-9whkr\" (UID: \"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.466064 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.471126 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmwkx\" (UniqueName: \"kubernetes.io/projected/3c5d1982-8fe9-4776-83a4-2bd856394360-kube-api-access-lmwkx\") pod \"dns-default-6mvkp\" (UID: \"3c5d1982-8fe9-4776-83a4-2bd856394360\") " pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.490133 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcg8c\" (UniqueName: \"kubernetes.io/projected/4009a9bc-41a7-4ddb-acda-11944ba3eb2d-kube-api-access-tcg8c\") pod \"machine-config-controller-84d6567774-4j4lc\" (UID: \"4009a9bc-41a7-4ddb-acda-11944ba3eb2d\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.492259 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-szc8j"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.500007 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.517917 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.518251 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.018239687 +0000 UTC m=+143.328528500 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.519895 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/deaead36-f205-4377-938d-53c8f82d5c03-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-57bhd\" (UID: \"deaead36-f205-4377-938d-53c8f82d5c03\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.537035 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.537322 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsghg\" (UniqueName: \"kubernetes.io/projected/e5866976-5fbc-455a-bfe9-f58b6e8b58ae-kube-api-access-fsghg\") pod \"package-server-manager-789f6589d5-vr4md\" (UID: \"e5866976-5fbc-455a-bfe9-f58b6e8b58ae\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.541759 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-299nl"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.542187 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-jvcm2"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.545747 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lzx9z"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.546152 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-6mvkp" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.547100 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj4jw\" (UniqueName: \"kubernetes.io/projected/8928157b-0f62-4952-96ef-9a0a8f543682-kube-api-access-mj4jw\") pod \"csi-hostpathplugin-8wfcd\" (UID: \"8928157b-0f62-4952-96ef-9a0a8f543682\") " pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.547345 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.559683 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s985q"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.572993 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thfxz\" (UniqueName: \"kubernetes.io/projected/edadc037-767e-469d-95d1-e620b2e87d5f-kube-api-access-thfxz\") pod \"migrator-59844c95c7-89dqj\" (UID: \"edadc037-767e-469d-95d1-e620b2e87d5f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.579862 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.582569 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.590580 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgsgq\" (UniqueName: \"kubernetes.io/projected/4b457a47-b41f-403f-bdf5-28cd26520dff-kube-api-access-jgsgq\") pod \"kube-storage-version-migrator-operator-b67b599dd-5chmq\" (UID: \"4b457a47-b41f-403f-bdf5-28cd26520dff\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.611308 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc8kw\" (UniqueName: \"kubernetes.io/projected/37f865e7-1dae-4f10-8d6c-d021844ab7b8-kube-api-access-kc8kw\") pod \"cluster-samples-operator-665b6dd947-kx6zb\" (UID: \"37f865e7-1dae-4f10-8d6c-d021844ab7b8\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.619144 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.619315 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.119289914 +0000 UTC m=+143.429578727 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.619441 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.619977 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.119970181 +0000 UTC m=+143.430258994 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.631863 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p9w2\" (UniqueName: \"kubernetes.io/projected/4dba6962-b548-45c2-9197-4071b577c2f5-kube-api-access-2p9w2\") pod \"packageserver-d55dfcdfc-cw6jc\" (UID: \"4dba6962-b548-45c2-9197-4071b577c2f5\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.632014 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.647375 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4jk2\" (UniqueName: \"kubernetes.io/projected/8b4f4aa2-1d81-4cd8-8355-f785c8443411-kube-api-access-m4jk2\") pod \"service-ca-operator-777779d784-bq4jt\" (UID: \"8b4f4aa2-1d81-4cd8-8355-f785c8443411\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.647673 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.654934 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.655716 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.671316 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j89xc\" (UniqueName: \"kubernetes.io/projected/620d277e-b41f-4876-9b64-577a5a4f149e-kube-api-access-j89xc\") pod \"ingress-canary-tc6gr\" (UID: \"620d277e-b41f-4876-9b64-577a5a4f149e\") " pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.678150 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.705646 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn2jg\" (UniqueName: \"kubernetes.io/projected/257397de-bb75-4f16-93d0-d516ea1938ff-kube-api-access-cn2jg\") pod \"machine-config-server-7sffz\" (UID: \"257397de-bb75-4f16-93d0-d516ea1938ff\") " pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.720427 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.720727 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.220703651 +0000 UTC m=+143.530992474 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.726793 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x54jd\" (UniqueName: \"kubernetes.io/projected/dacb34ee-2394-415c-8e84-dcd07ac61cc8-kube-api-access-x54jd\") pod \"olm-operator-6b444d44fb-5hh74\" (UID: \"dacb34ee-2394-415c-8e84-dcd07ac61cc8\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.727034 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.737726 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.738994 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-p6rml"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.740075 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-h55bj"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.745003 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8qln\" (UniqueName: \"kubernetes.io/projected/3c05c4fe-136b-402a-a35a-f91147e07150-kube-api-access-m8qln\") pod \"control-plane-machine-set-operator-78cbb6b69f-ts4x9\" (UID: \"3c05c4fe-136b-402a-a35a-f91147e07150\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.753115 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.771471 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.773314 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/69f43bae-138c-406c-83de-8145bd743c82-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.777930 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.784658 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.785475 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cwm2\" (UniqueName: \"kubernetes.io/projected/6916d599-759f-47bc-a87f-dc01ce91aed0-kube-api-access-8cwm2\") pod \"multus-admission-controller-857f4d67dd-q8cqh\" (UID: \"6916d599-759f-47bc-a87f-dc01ce91aed0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.791882 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.804820 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4qb6\" (UniqueName: \"kubernetes.io/projected/69f43bae-138c-406c-83de-8145bd743c82-kube-api-access-f4qb6\") pod \"ingress-operator-5b745b69d9-rrwxb\" (UID: \"69f43bae-138c-406c-83de-8145bd743c82\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.818763 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.822729 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.823000 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.322988309 +0000 UTC m=+143.633277122 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.835301 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.836058 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck8kg\" (UniqueName: \"kubernetes.io/projected/8a112806-91a3-4b3a-9c4d-74a8f723e5ce-kube-api-access-ck8kg\") pod \"machine-config-operator-74547568cd-sr875\" (UID: \"8a112806-91a3-4b3a-9c4d-74a8f723e5ce\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.861987 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7sffz" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.863198 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.865469 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lds4h\" (UniqueName: \"kubernetes.io/projected/d51b12db-7451-4cf2-bf6e-a156e2654342-kube-api-access-lds4h\") pod \"marketplace-operator-79b997595-xb8f7\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.880044 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4j29\" (UniqueName: \"kubernetes.io/projected/4499e978-6c8b-4d19-98da-9067d3d01ad8-kube-api-access-x4j29\") pod \"collect-profiles-29319735-w4qgb\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.887888 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-fq7mw"] Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.888937 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-tc6gr" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.913142 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.913181 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.927288 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:58 crc kubenswrapper[4922]: E0929 22:28:58.927639 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.427624627 +0000 UTC m=+143.737913440 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:58 crc kubenswrapper[4922]: W0929 22:28:58.985259 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddedd179_84f4_4532_9d1b_eed45990a6e2.slice/crio-75c00409f353e8c0fcbc48768a638440ae85c8db15fddf1f137a0b11d5423d4a WatchSource:0}: Error finding container 75c00409f353e8c0fcbc48768a638440ae85c8db15fddf1f137a0b11d5423d4a: Status 404 returned error can't find the container with id 75c00409f353e8c0fcbc48768a638440ae85c8db15fddf1f137a0b11d5423d4a Sep 29 22:28:58 crc kubenswrapper[4922]: I0929 22:28:58.992660 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vm59q"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.001855 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g9qdh"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.006660 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.014198 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.030255 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.030636 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.530620234 +0000 UTC m=+143.840909047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.045512 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.110572 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.131540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.131639 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.63162508 +0000 UTC m=+143.941913893 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.131877 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.132155 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.632148743 +0000 UTC m=+143.942437556 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.138027 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:28:59 crc kubenswrapper[4922]: W0929 22:28:59.186642 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5a8b1f1_bdd5_4f64_a85e_81c8541cb1a4.slice/crio-0383460d64febbb094ebb6ad07f4ca0a64313a25571dcea4030ef31527b654b1 WatchSource:0}: Error finding container 0383460d64febbb094ebb6ad07f4ca0a64313a25571dcea4030ef31527b654b1: Status 404 returned error can't find the container with id 0383460d64febbb094ebb6ad07f4ca0a64313a25571dcea4030ef31527b654b1 Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.207258 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8wfcd"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.225548 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-6mvkp"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.232860 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.233171 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.733157129 +0000 UTC m=+144.043445942 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.243405 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" event={"ID":"9267deb7-bbba-4e8a-923b-7be1559f83ce","Type":"ContainerStarted","Data":"48b57b0394664c306b21a8551aa431828a67b6a31f3c87e9376c8af896ec1bf2"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.252304 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" event={"ID":"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc","Type":"ContainerStarted","Data":"7c9e93d5db38ee545214c0080870b9c33a58f1b0719e9e49cb2830160116864e"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.252347 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" event={"ID":"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc","Type":"ContainerStarted","Data":"7984240d443793470514431959e6bd6c3b9305fa0af78996ddd3feb6bb57fafb"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.253187 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.255764 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" event={"ID":"87141534-77ec-47c4-91e2-ac69b63b5e97","Type":"ContainerStarted","Data":"037f23f98acd355557c51f9164b8105d29bb00a61b919f9ef24d7e49c5adc46a"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.257133 4922 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7sqx6 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.257186 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" podUID="c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.263477 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" event={"ID":"073755aa-250f-4eef-bbd8-434dbe15e772","Type":"ContainerStarted","Data":"f131f20b2d40d92b79a6e9c3961249116800c6c2f1fd95c89eaefa25727c7c83"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.266178 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s985q" event={"ID":"a7042784-d2d4-45a1-b4ea-b27ccd9791c8","Type":"ContainerStarted","Data":"b4fa1e39e5fdd16170ef6edaf20ddfc08ba3b46250a14e15739dfbdf75dd929a"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.267306 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fq7mw" event={"ID":"ddedd179-84f4-4532-9d1b-eed45990a6e2","Type":"ContainerStarted","Data":"75c00409f353e8c0fcbc48768a638440ae85c8db15fddf1f137a0b11d5423d4a"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.277971 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" event={"ID":"00301cb8-afaf-4f7d-b6c2-483a9203c794","Type":"ContainerStarted","Data":"ddbe09e192d559df3e494dd7d5071eb3dfeffc3f6c116f67f6a227bf377493a6"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.297490 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.301355 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" event={"ID":"0f80dc4b-9880-42b5-97ae-8475d9087763","Type":"ContainerStarted","Data":"dac95e198dc6dc5516455042a1feec63e50c306767055ffd62d1374794e42c2c"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.311144 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" event={"ID":"243f1207-b91f-4862-8c96-311f14da5e3b","Type":"ContainerStarted","Data":"e76e2b633ee53b0ea839e4159d7e5de2c192101ea1f16c02c0abc3a10d00b300"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.311177 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" event={"ID":"243f1207-b91f-4862-8c96-311f14da5e3b","Type":"ContainerStarted","Data":"8db8947f2abfb6b8146a2996ee4fba441f911c7c22eb02509e69a68a9ef43f8e"} Sep 
29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.314331 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" event={"ID":"e73942c9-01ae-46bc-9fa2-d8c64727cadf","Type":"ContainerStarted","Data":"25e9a3d72154b7985738b40e029813ab3a356a62cfe0bafe6b3ab94f2cd6b277"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.320465 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-2v5hs" event={"ID":"2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e","Type":"ContainerStarted","Data":"a06fd55370c1c5dd81333b94c30a85d25ebd372fe8abb49936e7d9f6a9f2729d"} Sep 29 22:28:59 crc kubenswrapper[4922]: W0929 22:28:59.327669 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8928157b_0f62_4952_96ef_9a0a8f543682.slice/crio-aef1406a2927cca2b24bffef63db8891ae1d548bd57f898e430200cf43aae658 WatchSource:0}: Error finding container aef1406a2927cca2b24bffef63db8891ae1d548bd57f898e430200cf43aae658: Status 404 returned error can't find the container with id aef1406a2927cca2b24bffef63db8891ae1d548bd57f898e430200cf43aae658 Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.331966 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" event={"ID":"8277139a-08f5-442b-a0f5-c2d173f1b427","Type":"ContainerStarted","Data":"035ad8fc48371d2402c52a961ca113b4fe958b6a1b7183888dd78bb781f8fe77"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.333755 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.335029 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.835013207 +0000 UTC m=+144.145302110 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.342729 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" event={"ID":"c30379d2-4644-450d-bc35-d6a4c857d840","Type":"ContainerStarted","Data":"b26589c7bc8306c4f4850f3939889b8f1ed53bf70ee21b808cf5b9bcd51d6096"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.351660 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.372902 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-p6rml" event={"ID":"a30ab676-362b-42ca-9eb0-8d42e4196078","Type":"ContainerStarted","Data":"2727e9cb3ff928d0b31988982fa0362c59b12cfd307d28fcdab0f062ce602433"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.375095 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" event={"ID":"b895eae8-d790-41fe-a942-6e0c3d478c2d","Type":"ContainerStarted","Data":"f6c1b509745ba198b579546f25cbee47c495278742946b7cf620497b13b3377a"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.377868 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.385853 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" event={"ID":"d68157f0-f55b-45bf-8288-6d0bd26f84de","Type":"ContainerStarted","Data":"03cdb0d6cc843ac4e298e670819bdc47aebf8a32403546ff3211db37232c5baf"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.401173 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" event={"ID":"eaa66aca-b3cc-4908-a4c5-020719c25b94","Type":"ContainerStarted","Data":"61044887bb3e0ee447dff4da20640eb5cb43e8c85a4ac7577ded47f6d23df2dc"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.401451 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.404524 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" event={"ID":"09fd489c-5c74-4b35-835e-f183dc4986ae","Type":"ContainerStarted","Data":"cb4fc921fc4944e04ade84baca83f797c4255345ef119b4415f82f2a3e6333c9"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.412714 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" event={"ID":"a5b06da6-a6a0-4367-b89d-619e1dd50c4d","Type":"ContainerStarted","Data":"2955df267ff532f0a8e18f4794d41342850ca349ed53126cb6917629d776ec7b"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.412749 4922 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" event={"ID":"a5b06da6-a6a0-4367-b89d-619e1dd50c4d","Type":"ContainerStarted","Data":"5a023b26125b48059b4a66c74cc95a616312afa10463e755edc98b128bb349d7"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.416780 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" event={"ID":"eecd9ce0-6b17-4527-aaad-93e50307ec8f","Type":"ContainerStarted","Data":"a7ad92d1b343ab549baf20e2ca4adf80d3222e2bc567bcf0523435617ae187dc"} Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.435155 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.436829 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd"] Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.436899 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:28:59.936885045 +0000 UTC m=+144.247173858 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: W0929 22:28:59.453079 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddacb34ee_2394_415c_8e84_dcd07ac61cc8.slice/crio-3c0df1eb696a7409294d38a9144990a43144c22527a91875b6f1b11e08d84463 WatchSource:0}: Error finding container 3c0df1eb696a7409294d38a9144990a43144c22527a91875b6f1b11e08d84463: Status 404 returned error can't find the container with id 3c0df1eb696a7409294d38a9144990a43144c22527a91875b6f1b11e08d84463 Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.529908 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" podStartSLOduration=122.529890739 podStartE2EDuration="2m2.529890739s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:59.527344444 +0000 UTC m=+143.837633257" watchObservedRunningTime="2025-09-29 22:28:59.529890739 +0000 UTC m=+143.840179552" Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.537372 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: 
\"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.537800 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.037786399 +0000 UTC m=+144.348075312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: W0929 22:28:59.548999 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddeaead36_f205_4377_938d_53c8f82d5c03.slice/crio-d15cbaece937bddf96fff58b6f8124c826b6c046d7fdbd097c167d63a3b56dee WatchSource:0}: Error finding container d15cbaece937bddf96fff58b6f8124c826b6c046d7fdbd097c167d63a3b56dee: Status 404 returned error can't find the container with id d15cbaece937bddf96fff58b6f8124c826b6c046d7fdbd097c167d63a3b56dee Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.638434 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.638748 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.138734503 +0000 UTC m=+144.449023316 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.650127 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc"] Sep 29 22:28:59 crc kubenswrapper[4922]: W0929 22:28:59.682549 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod257397de_bb75_4f16_93d0_d516ea1938ff.slice/crio-73674ee87aaf1570c481d3cb9e3c4ec8dc0481c6b90c043ca935f670a4885461 WatchSource:0}: Error finding container 73674ee87aaf1570c481d3cb9e3c4ec8dc0481c6b90c043ca935f670a4885461: Status 404 returned error can't find the container with id 73674ee87aaf1570c481d3cb9e3c4ec8dc0481c6b90c043ca935f670a4885461 Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.694753 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.730440 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md"] Sep 29 22:28:59 crc kubenswrapper[4922]: W0929 22:28:59.731774 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4dba6962_b548_45c2_9197_4071b577c2f5.slice/crio-816b060ff0a7f28f0a4c8eb658f01a8a6029449edd04dd00022b46f27808f467 WatchSource:0}: Error finding container 816b060ff0a7f28f0a4c8eb658f01a8a6029449edd04dd00022b46f27808f467: Status 404 returned error can't find the container with id 816b060ff0a7f28f0a4c8eb658f01a8a6029449edd04dd00022b46f27808f467 Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.741413 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.741754 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.24174209 +0000 UTC m=+144.552030903 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.796673 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-sr875"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.820234 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-2v5hs" podStartSLOduration=122.820221146 podStartE2EDuration="2m2.820221146s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:28:59.818888113 +0000 UTC m=+144.129176926" watchObservedRunningTime="2025-09-29 22:28:59.820221146 +0000 UTC m=+144.130509959" Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.828762 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.843002 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.843318 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.34330494 +0000 UTC m=+144.653593753 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.919852 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.928243 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb"] Sep 29 22:28:59 crc kubenswrapper[4922]: I0929 22:28:59.945210 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:28:59 crc kubenswrapper[4922]: E0929 22:28:59.945914 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.445904057 +0000 UTC m=+144.756192870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: W0929 22:29:00.028077 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b4f4aa2_1d81_4cd8_8355_f785c8443411.slice/crio-746314a89260f68838ef779309f4e861b6ca657b51e891e2d0389654784272cb WatchSource:0}: Error finding container 746314a89260f68838ef779309f4e861b6ca657b51e891e2d0389654784272cb: Status 404 returned error can't find the container with id 746314a89260f68838ef779309f4e861b6ca657b51e891e2d0389654784272cb Sep 29 22:29:00 crc kubenswrapper[4922]: W0929 22:29:00.029092 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69f43bae_138c_406c_83de_8145bd743c82.slice/crio-a1d8aa4f7bd06ff8df9c82b6a47b7a6b0c1f2458ab2945ae4d04c7ad284938bb WatchSource:0}: Error finding container a1d8aa4f7bd06ff8df9c82b6a47b7a6b0c1f2458ab2945ae4d04c7ad284938bb: Status 404 returned error can't find the container with id a1d8aa4f7bd06ff8df9c82b6a47b7a6b0c1f2458ab2945ae4d04c7ad284938bb Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.033954 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.042213 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router 
namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:00 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:00 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:00 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.042267 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:00 crc kubenswrapper[4922]: W0929 22:29:00.042909 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a112806_91a3_4b3a_9c4d_74a8f723e5ce.slice/crio-c2c63b5356184b7f110a056a43b8f5d7e93951fae532b846689fe8c7ec002031 WatchSource:0}: Error finding container c2c63b5356184b7f110a056a43b8f5d7e93951fae532b846689fe8c7ec002031: Status 404 returned error can't find the container with id c2c63b5356184b7f110a056a43b8f5d7e93951fae532b846689fe8c7ec002031 Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.055125 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.055529 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.555514021 +0000 UTC m=+144.865802834 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.103013 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xb8f7"] Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.156146 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.157943 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.657925153 +0000 UTC m=+144.968213986 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.198660 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-tc6gr"] Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.204595 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb"] Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.217580 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q8cqh"] Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.256751 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.256934 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.756917938 +0000 UTC m=+145.067206751 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.256985 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.257277 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.757269177 +0000 UTC m=+145.067557990 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: W0929 22:29:00.355592 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod620d277e_b41f_4876_9b64_577a5a4f149e.slice/crio-1cc27529237202ad980bf11b6a1506fb71c34f6c11b2c45d0a0b9d36421df5d9 WatchSource:0}: Error finding container 1cc27529237202ad980bf11b6a1506fb71c34f6c11b2c45d0a0b9d36421df5d9: Status 404 returned error can't find the container with id 1cc27529237202ad980bf11b6a1506fb71c34f6c11b2c45d0a0b9d36421df5d9 Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.358603 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.358946 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.858932249 +0000 UTC m=+145.169221062 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.460552 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.460872 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:00.960859019 +0000 UTC m=+145.271147832 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.474462 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" event={"ID":"8928157b-0f62-4952-96ef-9a0a8f543682","Type":"ContainerStarted","Data":"aef1406a2927cca2b24bffef63db8891ae1d548bd57f898e430200cf43aae658"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.489053 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" event={"ID":"87141534-77ec-47c4-91e2-ac69b63b5e97","Type":"ContainerStarted","Data":"3eb204d8c77ab807f284f9359c2547008346f756f6fad7bfbf1092c20f3292d4"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.506042 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" event={"ID":"243f1207-b91f-4862-8c96-311f14da5e3b","Type":"ContainerStarted","Data":"e0a0605b26623864244f292ad7780ef57462162039f52a5b3d7fde7b159cde39"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.517723 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" event={"ID":"deaead36-f205-4377-938d-53c8f82d5c03","Type":"ContainerStarted","Data":"d15cbaece937bddf96fff58b6f8124c826b6c046d7fdbd097c167d63a3b56dee"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.540340 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" event={"ID":"d68157f0-f55b-45bf-8288-6d0bd26f84de","Type":"ContainerStarted","Data":"07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.541095 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.552325 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" event={"ID":"8b4f4aa2-1d81-4cd8-8355-f785c8443411","Type":"ContainerStarted","Data":"746314a89260f68838ef779309f4e861b6ca657b51e891e2d0389654784272cb"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.561119 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.562305 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-tc6gr" event={"ID":"620d277e-b41f-4876-9b64-577a5a4f149e","Type":"ContainerStarted","Data":"1cc27529237202ad980bf11b6a1506fb71c34f6c11b2c45d0a0b9d36421df5d9"} Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.562533 4922 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.062515051 +0000 UTC m=+145.372803864 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.564302 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" event={"ID":"e5866976-5fbc-455a-bfe9-f58b6e8b58ae","Type":"ContainerStarted","Data":"c81a25d33fdda5f4431a5e44c0d3a59c712fea0160934f3ea95631b762a81314"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.569413 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" event={"ID":"dacb34ee-2394-415c-8e84-dcd07ac61cc8","Type":"ContainerStarted","Data":"3c0df1eb696a7409294d38a9144990a43144c22527a91875b6f1b11e08d84463"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.572849 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.582901 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" event={"ID":"b61cd9df-6474-43fc-952f-1d032f189678","Type":"ContainerStarted","Data":"53a84e274ac9607f93c9e3d5b6753e590318a4164fc1262f1c2c70ce0c4ea745"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.582937 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" event={"ID":"b61cd9df-6474-43fc-952f-1d032f189678","Type":"ContainerStarted","Data":"666fe593bcb0e2fa709166e3742dd71ca2297f4fa867f4b7828f9218f214cac4"} Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.588277 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.088237442 +0000 UTC m=+145.398526255 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.627229 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" event={"ID":"09fd489c-5c74-4b35-835e-f183dc4986ae","Type":"ContainerStarted","Data":"a1f6d72752f69ad7f597c0a1834dfb64e0648b4ac099752aa634242f47e3708c"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.641905 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" event={"ID":"4b457a47-b41f-403f-bdf5-28cd26520dff","Type":"ContainerStarted","Data":"816b8d10521ac0e1aed87ed7dec38c158bc48303e6422120ba43353b4b249eaf"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.641935 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" event={"ID":"4b457a47-b41f-403f-bdf5-28cd26520dff","Type":"ContainerStarted","Data":"d4795a475458016218e6d74cf818223b82da307c7af4cd653f9a35d93ee9bcae"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.661359 4922 generic.go:334] "Generic (PLEG): container finished" podID="b895eae8-d790-41fe-a942-6e0c3d478c2d" containerID="72a8c57df78a2b8fe868c8390c93ae88613b1e2f09701e4bcd8b94e6736f60cd" exitCode=0 Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.661430 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" event={"ID":"b895eae8-d790-41fe-a942-6e0c3d478c2d","Type":"ContainerDied","Data":"72a8c57df78a2b8fe868c8390c93ae88613b1e2f09701e4bcd8b94e6736f60cd"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.671559 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" event={"ID":"c30379d2-4644-450d-bc35-d6a4c857d840","Type":"ContainerStarted","Data":"23eb23f4044a103569ddce0d2391f49494ca9a625ee490562e49f9168e8ec0ee"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.672488 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.673483 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.673758 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.173740676 +0000 UTC m=+145.484029489 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.675628 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" event={"ID":"69f43bae-138c-406c-83de-8145bd743c82","Type":"ContainerStarted","Data":"a1d8aa4f7bd06ff8df9c82b6a47b7a6b0c1f2458ab2945ae4d04c7ad284938bb"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.686279 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" event={"ID":"eecd9ce0-6b17-4527-aaad-93e50307ec8f","Type":"ContainerStarted","Data":"6f1be2c1d96b7303c22c0f10c32f6d981988fe146aedc84a0b7f4c7a26a50956"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.693558 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.748678 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" event={"ID":"4499e978-6c8b-4d19-98da-9067d3d01ad8","Type":"ContainerStarted","Data":"f397cdee4f2c37f67584f4cecc032a4d68cb2dee3d8f4f13b1f8577597bb6864"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.760586 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" event={"ID":"6916d599-759f-47bc-a87f-dc01ce91aed0","Type":"ContainerStarted","Data":"892ac3d66a6f0624d4e893c56247664afa177b46953e16ccac27b388a3de29c7"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.770488 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s985q" event={"ID":"a7042784-d2d4-45a1-b4ea-b27ccd9791c8","Type":"ContainerStarted","Data":"fb19cda02de7cd5a64fef49ec79a33f34cf0ab6fcee0eed4e86616054c03c793"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.771134 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.775234 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.777455 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.277439831 +0000 UTC m=+145.587728644 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.786463 4922 patch_prober.go:28] interesting pod/console-operator-58897d9998-s985q container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.786549 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-s985q" podUID="a7042784-d2d4-45a1-b4ea-b27ccd9791c8" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.802355 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.822283 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" event={"ID":"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4","Type":"ContainerStarted","Data":"b4d71916112ebe0a4b9ef48f2fcb3268315d6b4a72d519dcd807f9be2af28493"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.822330 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" event={"ID":"c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4","Type":"ContainerStarted","Data":"0383460d64febbb094ebb6ad07f4ca0a64313a25571dcea4030ef31527b654b1"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.823035 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.824154 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" podStartSLOduration=123.824139892 podStartE2EDuration="2m3.824139892s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:00.82325863 +0000 UTC m=+145.133547443" watchObservedRunningTime="2025-09-29 22:29:00.824139892 +0000 UTC m=+145.134428705" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.824329 4922 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-9whkr container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.824360 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" podUID="c5a8b1f1-bdd5-4f64-a85e-81c8541cb1a4" containerName="catalog-operator" 
probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.833710 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" event={"ID":"d51b12db-7451-4cf2-bf6e-a156e2654342","Type":"ContainerStarted","Data":"bebbccf830b64ce19db7cc9ff52a7312d22947bef4fc70c50abeb49028546c86"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.838288 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" event={"ID":"eaa66aca-b3cc-4908-a4c5-020719c25b94","Type":"ContainerStarted","Data":"97b559390ad07e391a476d29d3f6661dbf0c994aa1a15ac1c61f1ae2db122f2b"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.850271 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" event={"ID":"00301cb8-afaf-4f7d-b6c2-483a9203c794","Type":"ContainerStarted","Data":"5a45b068cb560509a2cf5a480401fdd0ec410b1e5a70ab7edc4645eb27be1bad"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.856379 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4mgqx" podStartSLOduration=124.856361808 podStartE2EDuration="2m4.856361808s" podCreationTimestamp="2025-09-29 22:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:00.852962862 +0000 UTC m=+145.163251675" watchObservedRunningTime="2025-09-29 22:29:00.856361808 +0000 UTC m=+145.166650611" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.858579 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" event={"ID":"8a112806-91a3-4b3a-9c4d-74a8f723e5ce","Type":"ContainerStarted","Data":"c2c63b5356184b7f110a056a43b8f5d7e93951fae532b846689fe8c7ec002031"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.880115 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" event={"ID":"073755aa-250f-4eef-bbd8-434dbe15e772","Type":"ContainerStarted","Data":"c9c25aa97ff2cfe0e0c3fb416ebe872a474324780c093cf71c3d5566db86dcb0"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.880612 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.881511 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.381496154 +0000 UTC m=+145.691784967 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.953546 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" event={"ID":"edadc037-767e-469d-95d1-e620b2e87d5f","Type":"ContainerStarted","Data":"e2d53ab642b5ddecda52e816394c2251f6311e6cb7eb1bb725f15e621ad2feb4"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.953595 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" event={"ID":"edadc037-767e-469d-95d1-e620b2e87d5f","Type":"ContainerStarted","Data":"263be4c4d70eaa83087d1c889e2415db7911ffe5d0d8283cbbc5bfbf728df6e4"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.957090 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-t49hc" podStartSLOduration=124.957068726 podStartE2EDuration="2m4.957068726s" podCreationTimestamp="2025-09-29 22:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:00.954136282 +0000 UTC m=+145.264425095" watchObservedRunningTime="2025-09-29 22:29:00.957068726 +0000 UTC m=+145.267357539" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.958362 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" event={"ID":"4dba6962-b548-45c2-9197-4071b577c2f5","Type":"ContainerStarted","Data":"816b060ff0a7f28f0a4c8eb658f01a8a6029449edd04dd00022b46f27808f467"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.959249 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.960149 4922 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cw6jc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" start-of-body= Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.960181 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" podUID="4dba6962-b548-45c2-9197-4071b577c2f5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.978729 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" event={"ID":"a5b06da6-a6a0-4367-b89d-619e1dd50c4d","Type":"ContainerStarted","Data":"bc702bcbc6d17236d8b6a86d58000276c2d0533fd052ab6371c4a64adcc8d15c"} Sep 29 22:29:00 crc kubenswrapper[4922]: I0929 22:29:00.981749 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:00 crc kubenswrapper[4922]: E0929 22:29:00.982059 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.482046068 +0000 UTC m=+145.792334881 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.042348 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:01 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:01 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:01 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.042688 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.045075 4922 generic.go:334] "Generic (PLEG): container finished" podID="e73942c9-01ae-46bc-9fa2-d8c64727cadf" containerID="8c5dd09f46f7d320dcff7fa6ea3471fda2d1b76aeb03926c18a6e5265c0d52a0" exitCode=0 Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.045305 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" event={"ID":"e73942c9-01ae-46bc-9fa2-d8c64727cadf","Type":"ContainerDied","Data":"8c5dd09f46f7d320dcff7fa6ea3471fda2d1b76aeb03926c18a6e5265c0d52a0"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.059018 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6mvkp" event={"ID":"3c5d1982-8fe9-4776-83a4-2bd856394360","Type":"ContainerStarted","Data":"0522ed32230a2f8b0aee90cdda61840217ef8f85a66880aad9fba024bd2dcd3f"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.059062 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6mvkp" event={"ID":"3c5d1982-8fe9-4776-83a4-2bd856394360","Type":"ContainerStarted","Data":"9cad21b291b72c14f3e61306716b559cbf342caa8080d267f9100b853d19c99f"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.060737 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-g9qdh" podStartSLOduration=124.06072614 podStartE2EDuration="2m4.06072614s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.055519508 +0000 UTC m=+145.365808311" watchObservedRunningTime="2025-09-29 22:29:01.06072614 +0000 UTC m=+145.371014953" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.082216 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.083383 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.583365503 +0000 UTC m=+145.893654316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.093128 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-twpc7" podStartSLOduration=124.093109019 podStartE2EDuration="2m4.093109019s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.077811092 +0000 UTC m=+145.388099905" watchObservedRunningTime="2025-09-29 22:29:01.093109019 +0000 UTC m=+145.403397832" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.100037 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" event={"ID":"0f80dc4b-9880-42b5-97ae-8475d9087763","Type":"ContainerStarted","Data":"32ed3eb826e23443160f71d50cd859b69171edd534285c7e0aeba4c5d4c46155"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.177243 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-s985q" podStartSLOduration=124.177229938 podStartE2EDuration="2m4.177229938s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.176069369 +0000 UTC m=+145.486358172" watchObservedRunningTime="2025-09-29 22:29:01.177229938 +0000 UTC m=+145.487518751" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.181310 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-dssdr" podStartSLOduration=124.181290991 podStartE2EDuration="2m4.181290991s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.128400182 +0000 UTC m=+145.438688995" watchObservedRunningTime="2025-09-29 22:29:01.181290991 
+0000 UTC m=+145.491579804" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.178724 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" event={"ID":"37f865e7-1dae-4f10-8d6c-d021844ab7b8","Type":"ContainerStarted","Data":"d50f20ce99b89a005c4c1bd9b7d933fe77637f2221027d0398a1c7ecd4800d91"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.190313 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.191435 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.691423417 +0000 UTC m=+146.001712230 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.208753 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" event={"ID":"3c05c4fe-136b-402a-a35a-f91147e07150","Type":"ContainerStarted","Data":"b5a65f481670a90cb0b7fdb00bf35dbdb83aaf85a34f050bc5e6878065015cf9"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.221496 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" event={"ID":"4009a9bc-41a7-4ddb-acda-11944ba3eb2d","Type":"ContainerStarted","Data":"d295f61d6d9173ef988c075824e05ea65af79617689a363451d2b044e6e4ec54"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.221535 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" event={"ID":"4009a9bc-41a7-4ddb-acda-11944ba3eb2d","Type":"ContainerStarted","Data":"28f010166154cf50b61fabeb148f6a770d3aed16f29ff6d6c407a9c9f7d20f07"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.230745 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-7sffz" event={"ID":"257397de-bb75-4f16-93d0-d516ea1938ff","Type":"ContainerStarted","Data":"73674ee87aaf1570c481d3cb9e3c4ec8dc0481c6b90c043ca935f670a4885461"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.232524 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" podStartSLOduration=125.232510337 podStartE2EDuration="2m5.232510337s" podCreationTimestamp="2025-09-29 22:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.230587888 +0000 UTC m=+145.540876701" 
watchObservedRunningTime="2025-09-29 22:29:01.232510337 +0000 UTC m=+145.542799150" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.260247 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" event={"ID":"8277139a-08f5-442b-a0f5-c2d173f1b427","Type":"ContainerStarted","Data":"db94b1b0293726bbd9d34e15b7b90291ccac1ab2a9617996fedd619c39af2b43"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.263183 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5chmq" podStartSLOduration=124.263022069 podStartE2EDuration="2m4.263022069s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.260887915 +0000 UTC m=+145.571176728" watchObservedRunningTime="2025-09-29 22:29:01.263022069 +0000 UTC m=+145.573310882" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.292581 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fq7mw" event={"ID":"ddedd179-84f4-4532-9d1b-eed45990a6e2","Type":"ContainerStarted","Data":"157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.293873 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.294886 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:01.794865735 +0000 UTC m=+146.105154548 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.294986 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" podStartSLOduration=124.294971068 podStartE2EDuration="2m4.294971068s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.293585363 +0000 UTC m=+145.603874176" watchObservedRunningTime="2025-09-29 22:29:01.294971068 +0000 UTC m=+145.605259871" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.341917 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" event={"ID":"9267deb7-bbba-4e8a-923b-7be1559f83ce","Type":"ContainerStarted","Data":"c42a8e1036a71fa484733ca576a08a5198f9480b695ea081315198da3a684b41"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.349459 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-p6rml" event={"ID":"a30ab676-362b-42ca-9eb0-8d42e4196078","Type":"ContainerStarted","Data":"5253c99573ab184a669d538fce82051e7e925e6cc22b1f1d28e45b2370c9dae5"} Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.349491 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-p6rml" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.354212 4922 patch_prober.go:28] interesting pod/downloads-7954f5f757-p6rml container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.354257 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-p6rml" podUID="a30ab676-362b-42ca-9eb0-8d42e4196078" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.358343 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.399244 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.404837 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-29 22:29:01.904820768 +0000 UTC m=+146.215109581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.420312 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-vqqxw" podStartSLOduration=124.420296439 podStartE2EDuration="2m4.420296439s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.39858207 +0000 UTC m=+145.708870883" watchObservedRunningTime="2025-09-29 22:29:01.420296439 +0000 UTC m=+145.730585252" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.466435 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-jkrwh" podStartSLOduration=124.466378755 podStartE2EDuration="2m4.466378755s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.449128299 +0000 UTC m=+145.759417102" watchObservedRunningTime="2025-09-29 22:29:01.466378755 +0000 UTC m=+145.776667568" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.466684 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-h55bj" podStartSLOduration=125.466679833 podStartE2EDuration="2m5.466679833s" podCreationTimestamp="2025-09-29 22:26:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.421014077 +0000 UTC m=+145.731302890" watchObservedRunningTime="2025-09-29 22:29:01.466679833 +0000 UTC m=+145.776968646" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.510371 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.510548 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.010531612 +0000 UTC m=+146.320820425 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.513009 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.513314 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.013305982 +0000 UTC m=+146.323594785 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.533039 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" podStartSLOduration=124.533026601 podStartE2EDuration="2m4.533026601s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.490785673 +0000 UTC m=+145.801074486" watchObservedRunningTime="2025-09-29 22:29:01.533026601 +0000 UTC m=+145.843315414" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.604816 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-5mtpj" podStartSLOduration=124.604801837 podStartE2EDuration="2m4.604801837s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.579088137 +0000 UTC m=+145.889376950" watchObservedRunningTime="2025-09-29 22:29:01.604801837 +0000 UTC m=+145.915090650" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.610303 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" podStartSLOduration=124.610288406 podStartE2EDuration="2m4.610288406s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.603439633 +0000 UTC m=+145.913728446" watchObservedRunningTime="2025-09-29 22:29:01.610288406 +0000 UTC m=+145.920577219" Sep 29 22:29:01 crc 
kubenswrapper[4922]: I0929 22:29:01.617327 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.617787 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.117772286 +0000 UTC m=+146.428061099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.664257 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" podStartSLOduration=124.664241602 podStartE2EDuration="2m4.664241602s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.663282447 +0000 UTC m=+145.973571260" watchObservedRunningTime="2025-09-29 22:29:01.664241602 +0000 UTC m=+145.974530415" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.666251 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-7sffz" podStartSLOduration=6.666243992 podStartE2EDuration="6.666243992s" podCreationTimestamp="2025-09-29 22:28:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.631247847 +0000 UTC m=+145.941536660" watchObservedRunningTime="2025-09-29 22:29:01.666243992 +0000 UTC m=+145.976532795" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.691696 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-p6rml" podStartSLOduration=124.691668566 podStartE2EDuration="2m4.691668566s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.689425519 +0000 UTC m=+145.999714332" watchObservedRunningTime="2025-09-29 22:29:01.691668566 +0000 UTC m=+146.001957379" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.725085 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.725442 4922 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.22542682 +0000 UTC m=+146.535715633 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.820866 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-jvcm2" podStartSLOduration=124.820848785 podStartE2EDuration="2m4.820848785s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.789623565 +0000 UTC m=+146.099912378" watchObservedRunningTime="2025-09-29 22:29:01.820848785 +0000 UTC m=+146.131137598" Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.828135 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.828428 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.328413026 +0000 UTC m=+146.638701839 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:01 crc kubenswrapper[4922]: I0929 22:29:01.929099 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:01 crc kubenswrapper[4922]: E0929 22:29:01.929717 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.4297057 +0000 UTC m=+146.739994513 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.033571 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.034529 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.534510442 +0000 UTC m=+146.844799255 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.046133 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:02 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:02 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:02 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.046175 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.135375 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.135779 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.635767404 +0000 UTC m=+146.946056217 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.236665 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.236825 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.736797891 +0000 UTC m=+147.047086704 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.237155 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.237555 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.73754256 +0000 UTC m=+147.047831373 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.338763 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.338912 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.838883435 +0000 UTC m=+147.149172248 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.338976 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.339326 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.839315406 +0000 UTC m=+147.149604219 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.356115 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" event={"ID":"6916d599-759f-47bc-a87f-dc01ce91aed0","Type":"ContainerStarted","Data":"5e5f15be17e85ba50f4bca5dd5b118be72c4441da3e4a9c4d78fca388ac32414"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.356185 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" event={"ID":"6916d599-759f-47bc-a87f-dc01ce91aed0","Type":"ContainerStarted","Data":"f29fc879ee581fb95ab0b421497c1c2d48040037d220b9abfb8c7991d7bbb75b"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.357601 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-tc6gr" event={"ID":"620d277e-b41f-4876-9b64-577a5a4f149e","Type":"ContainerStarted","Data":"4b986c40fe3a578af181f244da15e654ea87c0498804f38f64b159e8c1e72e23"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.359508 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" event={"ID":"4009a9bc-41a7-4ddb-acda-11944ba3eb2d","Type":"ContainerStarted","Data":"4a7dfd490d4f4281b57e4379f7ad2648b8a232aec3b615834161b91e36a7f845"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.362797 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" event={"ID":"e5866976-5fbc-455a-bfe9-f58b6e8b58ae","Type":"ContainerStarted","Data":"09548a0738c91c5fb56c379d310a93add03314ee51190f87705c98cf01d5bfa3"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.362940 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" event={"ID":"e5866976-5fbc-455a-bfe9-f58b6e8b58ae","Type":"ContainerStarted","Data":"44953870cb6f13703e156de8e882d8542a58f5fcee88d4174b2af81e6ded62a8"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.363584 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.364881 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-6mvkp" event={"ID":"3c5d1982-8fe9-4776-83a4-2bd856394360","Type":"ContainerStarted","Data":"04ab96af6314ae7b97b8b632fdd4acee213e278c3ea0ed87657f81e4d9284045"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.364959 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-6mvkp" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.367299 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-ts4x9" 
event={"ID":"3c05c4fe-136b-402a-a35a-f91147e07150","Type":"ContainerStarted","Data":"e6158a3f2e82e2f3cc2ad5c880c62cf37b6ab097de6fe2633e766b2da32e4009"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.370767 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" event={"ID":"edadc037-767e-469d-95d1-e620b2e87d5f","Type":"ContainerStarted","Data":"44a1f9c7111ab09f03ee13e70c0edd7982520385dc97094b02751cb2a6fbed24"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.373288 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" event={"ID":"8a112806-91a3-4b3a-9c4d-74a8f723e5ce","Type":"ContainerStarted","Data":"c5217d8e5bda6989413f92e5bf4ba158f9b6795fd93f2ce7d6462552f41192f7"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.373331 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" event={"ID":"8a112806-91a3-4b3a-9c4d-74a8f723e5ce","Type":"ContainerStarted","Data":"d8025077d025c22c8d6eb08f1138e093367e2644a21eec0d00f3bf26df5e59a2"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.385143 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" event={"ID":"e73942c9-01ae-46bc-9fa2-d8c64727cadf","Type":"ContainerStarted","Data":"249a4199ae955b595b70bd4441d4e52026e8cc5528677620860a6b067d1764ea"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.392615 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" event={"ID":"8b4f4aa2-1d81-4cd8-8355-f785c8443411","Type":"ContainerStarted","Data":"10afb8776ca31112f5fc3d1fcb3cdd5f56bcf69322a8e0db90f45536ab810ed9"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.394754 4922 generic.go:334] "Generic (PLEG): container finished" podID="073755aa-250f-4eef-bbd8-434dbe15e772" containerID="c9c25aa97ff2cfe0e0c3fb416ebe872a474324780c093cf71c3d5566db86dcb0" exitCode=0 Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.394798 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" event={"ID":"073755aa-250f-4eef-bbd8-434dbe15e772","Type":"ContainerDied","Data":"c9c25aa97ff2cfe0e0c3fb416ebe872a474324780c093cf71c3d5566db86dcb0"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.394854 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" event={"ID":"073755aa-250f-4eef-bbd8-434dbe15e772","Type":"ContainerStarted","Data":"45fab20ab48cd90a5de702d4daf9cb72fdf3d84aba82eb6ca360ce4377ba2e13"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.395137 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.399447 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" event={"ID":"b895eae8-d790-41fe-a942-6e0c3d478c2d","Type":"ContainerStarted","Data":"94048a030a470b95a31e1aff3e38b78c392213375b522e2a175c06bba64bbe54"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.399478 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" 
event={"ID":"b895eae8-d790-41fe-a942-6e0c3d478c2d","Type":"ContainerStarted","Data":"bc96bf05978e1fc6be638d569cf02eec66ea16f6112dac1b5932bb135fb98b5a"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.402860 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" event={"ID":"4dba6962-b548-45c2-9197-4071b577c2f5","Type":"ContainerStarted","Data":"dec8b6f21d39f0b2948f267abb5d7465c315bc134aa1937a207102053829a99f"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.408412 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" event={"ID":"dacb34ee-2394-415c-8e84-dcd07ac61cc8","Type":"ContainerStarted","Data":"56250932f6ab8da1b88ffe44aadeb0ef3b7515b8e389abf81e9441295b952106"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.408611 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.410283 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" event={"ID":"8928157b-0f62-4952-96ef-9a0a8f543682","Type":"ContainerStarted","Data":"aec5860dd9ef5d8a3b107f3b97b4f91affbe68c3cf6115c698ab35e11be6e003"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.417410 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" event={"ID":"37f865e7-1dae-4f10-8d6c-d021844ab7b8","Type":"ContainerStarted","Data":"d4f5926ac8499a47a36e300912b0e1c39b32b5b3d0a6601ee749b3b7498fb9d0"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.417651 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" event={"ID":"37f865e7-1dae-4f10-8d6c-d021844ab7b8","Type":"ContainerStarted","Data":"c9face122a7f32240abc79a45938af75d140b8b35f5006ea8046b5bdefcbaceb"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.436230 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-fq7mw" podStartSLOduration=125.436216238 podStartE2EDuration="2m5.436216238s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:01.823569124 +0000 UTC m=+146.133857957" watchObservedRunningTime="2025-09-29 22:29:02.436216238 +0000 UTC m=+146.746505051" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.437471 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.440581 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" event={"ID":"4499e978-6c8b-4d19-98da-9067d3d01ad8","Type":"ContainerStarted","Data":"be0c7ddb6f78a9be42ceaf431fcd784b5ca3fed33b714a6a3e14d4a1b86e3011"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.440669 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.441840 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:02.94182205 +0000 UTC m=+147.252110863 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.466063 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" event={"ID":"69f43bae-138c-406c-83de-8145bd743c82","Type":"ContainerStarted","Data":"f40ba3dd49c100218d96fccf57d350697227da294f91c126e51d04149b9a26a5"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.466109 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" event={"ID":"69f43bae-138c-406c-83de-8145bd743c82","Type":"ContainerStarted","Data":"e90aa634d65155fa56bf56826e5b65a69041cac5f7c22c0f4272c1fe0bdd40fa"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.478602 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" event={"ID":"deaead36-f205-4377-938d-53c8f82d5c03","Type":"ContainerStarted","Data":"2ab1ef4892ac596dc2697f1b5a09c88ed82ff60abebcf1c24797ec8cb551a0b4"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.490914 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" event={"ID":"d51b12db-7451-4cf2-bf6e-a156e2654342","Type":"ContainerStarted","Data":"b2fb5227972ad9ae476fa41df4649528697499ac0149502a3d6c020c10179213"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.491803 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.494912 4922 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xb8f7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.494967 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.513343 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-q8cqh" podStartSLOduration=125.513326859 podStartE2EDuration="2m5.513326859s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.450730865 +0000 UTC m=+146.761019678" watchObservedRunningTime="2025-09-29 22:29:02.513326859 +0000 UTC m=+146.823615672" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.513993 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-7sffz" event={"ID":"257397de-bb75-4f16-93d0-d516ea1938ff","Type":"ContainerStarted","Data":"8b6ae5b45d5ec1224b30556ed091de3e84313f6621f65a0ab7167945684ca335"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.514401 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" podStartSLOduration=125.514383146 podStartE2EDuration="2m5.514383146s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.505258495 +0000 UTC m=+146.815547308" watchObservedRunningTime="2025-09-29 22:29:02.514383146 +0000 UTC m=+146.824671959" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.543885 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.546367 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.046350815 +0000 UTC m=+147.356639618 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.554404 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-szc8j" event={"ID":"0f80dc4b-9880-42b5-97ae-8475d9087763","Type":"ContainerStarted","Data":"a440abe3f28af0d8af3ac4540265154aa6d82c6e458e3de044f0b16abae9ca10"} Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.563881 4922 patch_prober.go:28] interesting pod/downloads-7954f5f757-p6rml container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.564116 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-p6rml" podUID="a30ab676-362b-42ca-9eb0-8d42e4196078" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.577784 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-s985q" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.589724 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-9whkr" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.653787 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.656125 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.156104023 +0000 UTC m=+147.466392836 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.656742 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" podStartSLOduration=125.656732708 podStartE2EDuration="2m5.656732708s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.654744318 +0000 UTC m=+146.965033131" watchObservedRunningTime="2025-09-29 22:29:02.656732708 +0000 UTC m=+146.967021521" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.699236 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4j4lc" podStartSLOduration=125.699219664 podStartE2EDuration="2m5.699219664s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.696746901 +0000 UTC m=+147.007035714" watchObservedRunningTime="2025-09-29 22:29:02.699219664 +0000 UTC m=+147.009508477" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.726785 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bq4jt" podStartSLOduration=125.726769241 podStartE2EDuration="2m5.726769241s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.725811827 +0000 UTC m=+147.036100640" watchObservedRunningTime="2025-09-29 22:29:02.726769241 +0000 UTC m=+147.037058054" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.755970 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.756062 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5hh74" podStartSLOduration=125.756046892 podStartE2EDuration="2m5.756046892s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.75437909 +0000 UTC m=+147.064667903" watchObservedRunningTime="2025-09-29 22:29:02.756046892 +0000 UTC m=+147.066335705" Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.756331 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-09-29 22:29:03.256319479 +0000 UTC m=+147.566608292 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.786927 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-6mvkp" podStartSLOduration=7.786908383 podStartE2EDuration="7.786908383s" podCreationTimestamp="2025-09-29 22:28:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.786729168 +0000 UTC m=+147.097017981" watchObservedRunningTime="2025-09-29 22:29:02.786908383 +0000 UTC m=+147.097197196" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.810033 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-kx6zb" podStartSLOduration=125.810017788 podStartE2EDuration="2m5.810017788s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.808702364 +0000 UTC m=+147.118991177" watchObservedRunningTime="2025-09-29 22:29:02.810017788 +0000 UTC m=+147.120306591" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.859481 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.859775 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.359762406 +0000 UTC m=+147.670051219 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.891050 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" podStartSLOduration=125.891031598 podStartE2EDuration="2m5.891031598s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.854208036 +0000 UTC m=+147.164496849" watchObservedRunningTime="2025-09-29 22:29:02.891031598 +0000 UTC m=+147.201320411" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.915215 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.915491 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.919505 4922 patch_prober.go:28] interesting pod/apiserver-76f77b778f-lzx9z container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.8:8443/livez\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.919546 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" podUID="b895eae8-d790-41fe-a942-6e0c3d478c2d" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.8:8443/livez\": dial tcp 10.217.0.8:8443: connect: connection refused" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.943141 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-sr875" podStartSLOduration=125.943126806 podStartE2EDuration="2m5.943126806s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.891801357 +0000 UTC m=+147.202090170" watchObservedRunningTime="2025-09-29 22:29:02.943126806 +0000 UTC m=+147.253415609" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.943432 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" podStartSLOduration=125.943426724 podStartE2EDuration="2m5.943426724s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.940725035 +0000 UTC m=+147.251013848" watchObservedRunningTime="2025-09-29 22:29:02.943426724 +0000 UTC m=+147.253715537" Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.962806 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:02 crc kubenswrapper[4922]: E0929 22:29:02.963090 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.463079171 +0000 UTC m=+147.773367984 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:02 crc kubenswrapper[4922]: I0929 22:29:02.975287 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-tc6gr" podStartSLOduration=7.97527239 podStartE2EDuration="7.97527239s" podCreationTimestamp="2025-09-29 22:28:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:02.974702665 +0000 UTC m=+147.284991478" watchObservedRunningTime="2025-09-29 22:29:02.97527239 +0000 UTC m=+147.285561203" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.007627 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-89dqj" podStartSLOduration=126.007601888 podStartE2EDuration="2m6.007601888s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:03.00373406 +0000 UTC m=+147.314022863" watchObservedRunningTime="2025-09-29 22:29:03.007601888 +0000 UTC m=+147.317890701" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.040538 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:03 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:03 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:03 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.040583 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.064698 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.064959 4922 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.564947019 +0000 UTC m=+147.875235832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.093669 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" podStartSLOduration=126.093653086 podStartE2EDuration="2m6.093653086s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:03.093233845 +0000 UTC m=+147.403522658" watchObservedRunningTime="2025-09-29 22:29:03.093653086 +0000 UTC m=+147.403941899" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.139048 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" podStartSLOduration=126.139031394 podStartE2EDuration="2m6.139031394s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:03.111735643 +0000 UTC m=+147.422024456" watchObservedRunningTime="2025-09-29 22:29:03.139031394 +0000 UTC m=+147.449320207" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.147645 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.147940 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.166309 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.166670 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.666658463 +0000 UTC m=+147.976947276 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.208953 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rrwxb" podStartSLOduration=126.208939213 podStartE2EDuration="2m6.208939213s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:03.208749028 +0000 UTC m=+147.519037841" watchObservedRunningTime="2025-09-29 22:29:03.208939213 +0000 UTC m=+147.519228016" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.211012 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-57bhd" podStartSLOduration=126.211006385 podStartE2EDuration="2m6.211006385s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:03.158556498 +0000 UTC m=+147.468845311" watchObservedRunningTime="2025-09-29 22:29:03.211006385 +0000 UTC m=+147.521295198" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.267065 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.267670 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.767655909 +0000 UTC m=+148.077944722 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.377023 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.377353 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.877343165 +0000 UTC m=+148.187631978 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.405479 4922 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-cw6jc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.405544 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" podUID="4dba6962-b548-45c2-9197-4071b577c2f5" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.477807 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.477998 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.977970311 +0000 UTC m=+148.288259124 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.478254 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.478545 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:03.978538506 +0000 UTC m=+148.288827319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.560431 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" event={"ID":"8928157b-0f62-4952-96ef-9a0a8f543682","Type":"ContainerStarted","Data":"0b50c2b967e8b89b54470a2cd49af60c1f4df3b64a52577e9a33bdf8a2e5f68b"} Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.560936 4922 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xb8f7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.560988 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.575560 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-cw6jc" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.579561 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.581188 4922 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.081161923 +0000 UTC m=+148.391450736 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.682112 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.682637 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.18261663 +0000 UTC m=+148.492905443 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.782808 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.783183 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.283168695 +0000 UTC m=+148.593457498 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.809425 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4k5qr"] Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.810296 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.821776 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.826898 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4k5qr"] Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.886043 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-catalog-content\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.886083 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74mcv\" (UniqueName: \"kubernetes.io/projected/ce910279-f40a-4b94-9be2-718aa508e1d1-kube-api-access-74mcv\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.886099 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-utilities\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.886149 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.886420 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.386408818 +0000 UTC m=+148.696697631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.984194 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c7r4b"] Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.985017 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.986964 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.987150 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.487123637 +0000 UTC m=+148.797412450 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.987224 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-catalog-content\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.987259 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-utilities\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.987280 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74mcv\" (UniqueName: \"kubernetes.io/projected/ce910279-f40a-4b94-9be2-718aa508e1d1-kube-api-access-74mcv\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.987370 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:03 crc kubenswrapper[4922]: E0929 22:29:03.987787 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.487773383 +0000 UTC m=+148.798062196 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.988016 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-catalog-content\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.988042 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-utilities\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:03 crc kubenswrapper[4922]: I0929 22:29:03.989249 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.045311 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74mcv\" (UniqueName: \"kubernetes.io/projected/ce910279-f40a-4b94-9be2-718aa508e1d1-kube-api-access-74mcv\") pod \"community-operators-4k5qr\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.046353 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:04 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:04 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:04 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.046403 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.088239 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.088425 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb2wl\" (UniqueName: \"kubernetes.io/projected/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-kube-api-access-pb2wl\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.088504 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-utilities\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.088542 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-catalog-content\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.088617 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.588604745 +0000 UTC m=+148.898893558 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.099888 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.107294 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c7r4b"] Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.163738 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jd4wx"] Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.164629 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.165541 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.180562 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jd4wx"] Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191085 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-utilities\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191457 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-utilities\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191501 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-catalog-content\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191547 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-catalog-content\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191580 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb2wl\" (UniqueName: \"kubernetes.io/projected/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-kube-api-access-pb2wl\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191608 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-utilities\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191626 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dlhv\" (UniqueName: \"kubernetes.io/projected/86303f4e-f5e5-4779-b045-1f56a0ef1b32-kube-api-access-6dlhv\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.191646 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:04 crc 
kubenswrapper[4922]: E0929 22:29:04.191876 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.691865308 +0000 UTC m=+149.002154121 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.192200 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-catalog-content\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.226247 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb2wl\" (UniqueName: \"kubernetes.io/projected/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-kube-api-access-pb2wl\") pod \"certified-operators-c7r4b\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.298670 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.298925 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.798904257 +0000 UTC m=+149.109193070 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.299055 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.299081 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-utilities\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.299105 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dlhv\" (UniqueName: \"kubernetes.io/projected/86303f4e-f5e5-4779-b045-1f56a0ef1b32-kube-api-access-6dlhv\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.299129 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.299187 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.299211 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-catalog-content\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.299565 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-catalog-content\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.300784 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.301024 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.80101491 +0000 UTC m=+149.111303723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.301049 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-utilities\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.304644 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.306578 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.328020 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dlhv\" (UniqueName: \"kubernetes.io/projected/86303f4e-f5e5-4779-b045-1f56a0ef1b32-kube-api-access-6dlhv\") pod \"community-operators-jd4wx\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.361373 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-22bjg"] Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.362184 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.374748 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22bjg"] Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.399705 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.399864 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.399913 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8264q\" (UniqueName: \"kubernetes.io/projected/f8c0a27c-3686-4b44-8a91-45744aa8e551-kube-api-access-8264q\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.399937 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-utilities\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.399975 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-catalog-content\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.399994 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.401606 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:04.901583615 +0000 UTC m=+149.211872428 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.411032 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.416226 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.447240 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.467546 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.482386 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.502002 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8264q\" (UniqueName: \"kubernetes.io/projected/f8c0a27c-3686-4b44-8a91-45744aa8e551-kube-api-access-8264q\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.502036 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-utilities\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.502060 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.502090 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-catalog-content\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.502467 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-catalog-content\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.502911 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-utilities\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.503117 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.003106404 +0000 UTC m=+149.313395217 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.514978 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.526913 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8264q\" (UniqueName: \"kubernetes.io/projected/f8c0a27c-3686-4b44-8a91-45744aa8e551-kube-api-access-8264q\") pod \"certified-operators-22bjg\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.574980 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vm59q" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.603093 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.603438 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.103422313 +0000 UTC m=+149.413711126 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.616526 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c7r4b"] Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.617227 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" event={"ID":"8928157b-0f62-4952-96ef-9a0a8f543682","Type":"ContainerStarted","Data":"cfc2718eaeb3e4ad622aa1a42d80f46b7dab4ca86cb02e49c86935f3852019c4"} Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.617250 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" event={"ID":"8928157b-0f62-4952-96ef-9a0a8f543682","Type":"ContainerStarted","Data":"897bdc166795d4a11595e30066bcf643a66b470f986c2c882ae0f75b82f2d27c"} Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.640554 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-8wfcd" podStartSLOduration=9.640540932 podStartE2EDuration="9.640540932s" podCreationTimestamp="2025-09-29 22:28:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:04.639115076 +0000 UTC m=+148.949403889" watchObservedRunningTime="2025-09-29 22:29:04.640540932 +0000 UTC m=+148.950829745" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.646561 4922 generic.go:334] "Generic (PLEG): container finished" podID="4499e978-6c8b-4d19-98da-9067d3d01ad8" 
containerID="be0c7ddb6f78a9be42ceaf431fcd784b5ca3fed33b714a6a3e14d4a1b86e3011" exitCode=0 Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.647641 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" event={"ID":"4499e978-6c8b-4d19-98da-9067d3d01ad8","Type":"ContainerDied","Data":"be0c7ddb6f78a9be42ceaf431fcd784b5ca3fed33b714a6a3e14d4a1b86e3011"} Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.653491 4922 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-xb8f7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.653530 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.660862 4922 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.662393 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-mvvtp" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.689291 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.705348 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.719599 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.219581423 +0000 UTC m=+149.529870236 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.808968 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.809093 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.309068607 +0000 UTC m=+149.619357420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.809198 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.809995 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.309987621 +0000 UTC m=+149.620276434 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.902025 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4k5qr"] Sep 29 22:29:04 crc kubenswrapper[4922]: I0929 22:29:04.910402 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:04 crc kubenswrapper[4922]: E0929 22:29:04.910755 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.41072843 +0000 UTC m=+149.721017243 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.011334 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.011901 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.51189041 +0000 UTC m=+149.822179223 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.036418 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:05 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:05 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:05 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.036453 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.113561 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.113973 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.613955012 +0000 UTC m=+149.924243825 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.163510 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22bjg"] Sep 29 22:29:05 crc kubenswrapper[4922]: W0929 22:29:05.174429 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8c0a27c_3686_4b44_8a91_45744aa8e551.slice/crio-ab7f5e67c3ed1feed7dba40b74a5182b97af18f6c43454cdd54c98a9c060c878 WatchSource:0}: Error finding container ab7f5e67c3ed1feed7dba40b74a5182b97af18f6c43454cdd54c98a9c060c878: Status 404 returned error can't find the container with id ab7f5e67c3ed1feed7dba40b74a5182b97af18f6c43454cdd54c98a9c060c878 Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.215215 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.215497 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.715486262 +0000 UTC m=+150.025775075 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.274746 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jd4wx"] Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.315748 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.315901 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.815876292 +0000 UTC m=+150.126165105 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.316018 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.316303 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.816295253 +0000 UTC m=+150.126584066 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: W0929 22:29:05.327958 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-d80f050d25aac9cbcbceee61c54f1b87bbd6fb1e5381ec1fdcc85775643197ca WatchSource:0}: Error finding container d80f050d25aac9cbcbceee61c54f1b87bbd6fb1e5381ec1fdcc85775643197ca: Status 404 returned error can't find the container with id d80f050d25aac9cbcbceee61c54f1b87bbd6fb1e5381ec1fdcc85775643197ca Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.417491 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.417697 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.917670858 +0000 UTC m=+150.227959671 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.417760 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.418068 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:05.918054428 +0000 UTC m=+150.228343241 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.518496 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.518717 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:06.018685255 +0000 UTC m=+150.328974088 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.518965 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.519274 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:06.019261359 +0000 UTC m=+150.329550162 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.612765 4922 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T22:29:04.660889027Z","Handler":null,"Name":""} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.619700 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.620043 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 22:29:06.120012719 +0000 UTC m=+150.430301572 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.620102 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: E0929 22:29:05.621697 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 22:29:06.121591299 +0000 UTC m=+150.431880142 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-rwhm5" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.626647 4922 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.626692 4922 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.653975 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3fd20b67c0a2c987a49b2cdc3e18ae4feb940d8cc5b5b0a8ecdd3050982fbade"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.654056 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"49b6ed06930baf64c02284e166027a8679174483d8c89594ae7983fb6e4be4b5"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.657322 4922 generic.go:334] "Generic (PLEG): container finished" podID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerID="8d5a31b7981b1f6e1dc394740ce4b05a439415fd49a744a60ec831ed4e39f4e8" exitCode=0 Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.657371 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c7r4b" event={"ID":"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976","Type":"ContainerDied","Data":"8d5a31b7981b1f6e1dc394740ce4b05a439415fd49a744a60ec831ed4e39f4e8"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.657421 4922 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c7r4b" event={"ID":"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976","Type":"ContainerStarted","Data":"90f83069412c5495841cc0719e3d841e8e9151fa65aef0c2ce28b2d9c8a236a1"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.660333 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d80f050d25aac9cbcbceee61c54f1b87bbd6fb1e5381ec1fdcc85775643197ca"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.660374 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.663850 4922 generic.go:334] "Generic (PLEG): container finished" podID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerID="f8babb7cb6717cef45758cf9b40fc7b78b82be4ae7c77197ca60b9ff49ac4668" exitCode=0 Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.663902 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k5qr" event={"ID":"ce910279-f40a-4b94-9be2-718aa508e1d1","Type":"ContainerDied","Data":"f8babb7cb6717cef45758cf9b40fc7b78b82be4ae7c77197ca60b9ff49ac4668"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.663963 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k5qr" event={"ID":"ce910279-f40a-4b94-9be2-718aa508e1d1","Type":"ContainerStarted","Data":"2f1f182fd273c0d8b33592897b3683ea1023b3f4eadbcdebc60b8ce6729a8a20"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.665061 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jd4wx" event={"ID":"86303f4e-f5e5-4779-b045-1f56a0ef1b32","Type":"ContainerStarted","Data":"5f643a88f892305bd9a356e14aa465d67206710f3091c6e0a7dbc22b6cbb40d7"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.666584 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d574c78b3c95d12aeabacb1aec307094cf01552e812cdc02b62b10c0799ced9e"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.666623 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"cc8d5f979ab34cbbd87765e96f982887ba5fe0a60c87552116cd1496a4a3179c"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.668287 4922 generic.go:334] "Generic (PLEG): container finished" podID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerID="a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957" exitCode=0 Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.669433 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bjg" event={"ID":"f8c0a27c-3686-4b44-8a91-45744aa8e551","Type":"ContainerDied","Data":"a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.669484 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bjg" 
event={"ID":"f8c0a27c-3686-4b44-8a91-45744aa8e551","Type":"ContainerStarted","Data":"ab7f5e67c3ed1feed7dba40b74a5182b97af18f6c43454cdd54c98a9c060c878"} Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.721612 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.727116 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.825051 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.828209 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.828252 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.918708 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-rwhm5\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.946640 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2bmls"] Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.947584 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.950237 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.958802 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2bmls"] Sep 29 22:29:05 crc kubenswrapper[4922]: I0929 22:29:05.985275 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.027402 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4499e978-6c8b-4d19-98da-9067d3d01ad8-secret-volume\") pod \"4499e978-6c8b-4d19-98da-9067d3d01ad8\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.027508 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4j29\" (UniqueName: \"kubernetes.io/projected/4499e978-6c8b-4d19-98da-9067d3d01ad8-kube-api-access-x4j29\") pod \"4499e978-6c8b-4d19-98da-9067d3d01ad8\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.027543 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4499e978-6c8b-4d19-98da-9067d3d01ad8-config-volume\") pod \"4499e978-6c8b-4d19-98da-9067d3d01ad8\" (UID: \"4499e978-6c8b-4d19-98da-9067d3d01ad8\") " Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.027734 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-catalog-content\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.027787 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-utilities\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.027831 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62qhl\" (UniqueName: \"kubernetes.io/projected/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-kube-api-access-62qhl\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.028557 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4499e978-6c8b-4d19-98da-9067d3d01ad8-config-volume" (OuterVolumeSpecName: "config-volume") pod "4499e978-6c8b-4d19-98da-9067d3d01ad8" (UID: "4499e978-6c8b-4d19-98da-9067d3d01ad8"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.033304 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4499e978-6c8b-4d19-98da-9067d3d01ad8-kube-api-access-x4j29" (OuterVolumeSpecName: "kube-api-access-x4j29") pod "4499e978-6c8b-4d19-98da-9067d3d01ad8" (UID: "4499e978-6c8b-4d19-98da-9067d3d01ad8"). InnerVolumeSpecName "kube-api-access-x4j29". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.036654 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4499e978-6c8b-4d19-98da-9067d3d01ad8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4499e978-6c8b-4d19-98da-9067d3d01ad8" (UID: "4499e978-6c8b-4d19-98da-9067d3d01ad8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.036875 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:06 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:06 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:06 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.036932 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.126678 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.128378 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-catalog-content\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.128450 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-utilities\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.128482 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62qhl\" (UniqueName: \"kubernetes.io/projected/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-kube-api-access-62qhl\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.128519 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4j29\" (UniqueName: \"kubernetes.io/projected/4499e978-6c8b-4d19-98da-9067d3d01ad8-kube-api-access-x4j29\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.128531 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4499e978-6c8b-4d19-98da-9067d3d01ad8-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.128540 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4499e978-6c8b-4d19-98da-9067d3d01ad8-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.129210 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-catalog-content\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.129277 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-utilities\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.152673 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62qhl\" (UniqueName: \"kubernetes.io/projected/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-kube-api-access-62qhl\") pod \"redhat-marketplace-2bmls\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.300935 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.355885 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ntnpl"] Sep 29 22:29:06 crc kubenswrapper[4922]: E0929 22:29:06.356073 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4499e978-6c8b-4d19-98da-9067d3d01ad8" containerName="collect-profiles" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.356085 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4499e978-6c8b-4d19-98da-9067d3d01ad8" containerName="collect-profiles" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.356178 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4499e978-6c8b-4d19-98da-9067d3d01ad8" containerName="collect-profiles" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.358168 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.369887 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ntnpl"] Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.420700 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.437534 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.437592 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.437691 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-utilities\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.437815 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-catalog-content\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.437972 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7qcf\" (UniqueName: \"kubernetes.io/projected/941561c9-c666-4f06-a30c-914d9db9ce9e-kube-api-access-r7qcf\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.450206 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.451750 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.451936 4922 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.539338 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-catalog-content\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.539379 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.539449 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7qcf\" (UniqueName: \"kubernetes.io/projected/941561c9-c666-4f06-a30c-914d9db9ce9e-kube-api-access-r7qcf\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.539518 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-utilities\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.539556 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.539868 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-catalog-content\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.539903 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-utilities\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.570062 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7qcf\" (UniqueName: \"kubernetes.io/projected/941561c9-c666-4f06-a30c-914d9db9ce9e-kube-api-access-r7qcf\") pod \"redhat-marketplace-ntnpl\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.573118 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2bmls"] Sep 29 22:29:06 crc kubenswrapper[4922]: W0929 22:29:06.583369 4922 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode56c3a7a_4803_4ffa_94a8_8b0cbc75fb91.slice/crio-1e35ea767e46c9bcdbde6e407298cdba753bf73c64a85b8b70de6415165ccbe3 WatchSource:0}: Error finding container 1e35ea767e46c9bcdbde6e407298cdba753bf73c64a85b8b70de6415165ccbe3: Status 404 returned error can't find the container with id 1e35ea767e46c9bcdbde6e407298cdba753bf73c64a85b8b70de6415165ccbe3 Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.634133 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rwhm5"] Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.641023 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.641130 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.641289 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.655744 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.676864 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2bmls" event={"ID":"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91","Type":"ContainerStarted","Data":"1e35ea767e46c9bcdbde6e407298cdba753bf73c64a85b8b70de6415165ccbe3"} Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.679132 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a7382f7aa1b3db91b2297841f718f5058e1cdcac6e178eb2c1e8611372fa9f97"} Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.679268 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.682832 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" event={"ID":"4499e978-6c8b-4d19-98da-9067d3d01ad8","Type":"ContainerDied","Data":"f397cdee4f2c37f67584f4cecc032a4d68cb2dee3d8f4f13b1f8577597bb6864"} Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.682861 4922 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="f397cdee4f2c37f67584f4cecc032a4d68cb2dee3d8f4f13b1f8577597bb6864" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.682886 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.684192 4922 generic.go:334] "Generic (PLEG): container finished" podID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerID="7a9df367393bfe5d6072140c4aba458d4432409685a77a751496ae8455c8c324" exitCode=0 Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.684232 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jd4wx" event={"ID":"86303f4e-f5e5-4779-b045-1f56a0ef1b32","Type":"ContainerDied","Data":"7a9df367393bfe5d6072140c4aba458d4432409685a77a751496ae8455c8c324"} Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.686159 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" event={"ID":"104a5cfb-7f2a-48d1-be00-10f698d0b552","Type":"ContainerStarted","Data":"7c10ab9754767a0188430929855ee21f8df3cf81117ba5f36595628489c731f7"} Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.691570 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.780776 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.967880 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lw9w7"] Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.969938 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lw9w7"] Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.971028 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.972993 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 22:29:06 crc kubenswrapper[4922]: I0929 22:29:06.991790 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ntnpl"] Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.028983 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.044005 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:07 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:07 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:07 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.044058 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.046942 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz2lr\" (UniqueName: \"kubernetes.io/projected/e5e8f04f-0814-4db4-8614-ce1180ed65ca-kube-api-access-wz2lr\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.046980 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-utilities\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.047009 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-catalog-content\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.151974 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz2lr\" (UniqueName: \"kubernetes.io/projected/e5e8f04f-0814-4db4-8614-ce1180ed65ca-kube-api-access-wz2lr\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.152032 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-utilities\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.152063 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-catalog-content\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.152543 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-catalog-content\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.152786 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-utilities\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.180677 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz2lr\" (UniqueName: \"kubernetes.io/projected/e5e8f04f-0814-4db4-8614-ce1180ed65ca-kube-api-access-wz2lr\") pod \"redhat-operators-lw9w7\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.302612 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.346370 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ms7wt"] Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.347694 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.356612 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js9j9\" (UniqueName: \"kubernetes.io/projected/57144ff1-c845-4eee-a2fa-d69535c46ca2-kube-api-access-js9j9\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.356650 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-catalog-content\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.356675 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-utilities\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.360375 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ms7wt"] Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.458179 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js9j9\" (UniqueName: \"kubernetes.io/projected/57144ff1-c845-4eee-a2fa-d69535c46ca2-kube-api-access-js9j9\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.458221 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-catalog-content\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.458244 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-utilities\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.458774 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-utilities\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.458977 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-catalog-content\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.479895 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-js9j9\" (UniqueName: \"kubernetes.io/projected/57144ff1-c845-4eee-a2fa-d69535c46ca2-kube-api-access-js9j9\") pod \"redhat-operators-ms7wt\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.693257 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" event={"ID":"104a5cfb-7f2a-48d1-be00-10f698d0b552","Type":"ContainerStarted","Data":"d17bf8df2a847e83ff1648983d2bc378cb485b27f7b546e18a90df372fd289b3"} Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.693676 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.695204 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4","Type":"ContainerStarted","Data":"736e59b03895c9d1ca8757326aeec4e7c87ff27f1f7bdd7d47b216e5a842b6b0"} Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.695246 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4","Type":"ContainerStarted","Data":"65b900723c413da86c7e8089702f07242924e92657f21c9d49b0df887d41e563"} Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.696614 4922 generic.go:334] "Generic (PLEG): container finished" podID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerID="1cbbfbaae6c0b9072abd77f8de7fe3c066d9ed03882fd1c2fdfbcdddf850de23" exitCode=0 Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.696768 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2bmls" event={"ID":"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91","Type":"ContainerDied","Data":"1cbbfbaae6c0b9072abd77f8de7fe3c066d9ed03882fd1c2fdfbcdddf850de23"} Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.698333 4922 generic.go:334] "Generic (PLEG): container finished" podID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerID="00d55fa62e739d923f0fad0ea071b288ff87291c71abb2a02117f765c0abf1a2" exitCode=0 Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.699417 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ntnpl" event={"ID":"941561c9-c666-4f06-a30c-914d9db9ce9e","Type":"ContainerDied","Data":"00d55fa62e739d923f0fad0ea071b288ff87291c71abb2a02117f765c0abf1a2"} Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.699442 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ntnpl" event={"ID":"941561c9-c666-4f06-a30c-914d9db9ce9e","Type":"ContainerStarted","Data":"8e2d583530fcb82740764cd75ca955c263f85578cee574d827b5373be6ffdad7"} Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.707302 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.715828 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" podStartSLOduration=130.715796957 podStartE2EDuration="2m10.715796957s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:07.708651216 +0000 UTC m=+152.018940039" watchObservedRunningTime="2025-09-29 22:29:07.715796957 +0000 UTC m=+152.026085780" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.733751 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.7337079499999999 podStartE2EDuration="1.73370795s" podCreationTimestamp="2025-09-29 22:29:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:07.730427167 +0000 UTC m=+152.040715980" watchObservedRunningTime="2025-09-29 22:29:07.73370795 +0000 UTC m=+152.043996763" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.877843 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lw9w7"] Sep 29 22:29:07 crc kubenswrapper[4922]: W0929 22:29:07.884142 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5e8f04f_0814_4db4_8614_ce1180ed65ca.slice/crio-05a770eeb9f982e7856411703eadd8d9cccd33e68b848a8b5171ae59e520d907 WatchSource:0}: Error finding container 05a770eeb9f982e7856411703eadd8d9cccd33e68b848a8b5171ae59e520d907: Status 404 returned error can't find the container with id 05a770eeb9f982e7856411703eadd8d9cccd33e68b848a8b5171ae59e520d907 Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.920917 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:29:07 crc kubenswrapper[4922]: I0929 22:29:07.926690 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-lzx9z" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.034370 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.040447 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:08 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:08 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:08 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.040516 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.066353 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ms7wt"] Sep 29 22:29:08 crc 
kubenswrapper[4922]: W0929 22:29:08.093582 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57144ff1_c845_4eee_a2fa_d69535c46ca2.slice/crio-c501d39f30e048f562592e29ae2b3519cefb2c60038a5e2cb47204ae2dbeab77 WatchSource:0}: Error finding container c501d39f30e048f562592e29ae2b3519cefb2c60038a5e2cb47204ae2dbeab77: Status 404 returned error can't find the container with id c501d39f30e048f562592e29ae2b3519cefb2c60038a5e2cb47204ae2dbeab77 Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.304780 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.304823 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.306670 4922 patch_prober.go:28] interesting pod/downloads-7954f5f757-p6rml container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.306720 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-p6rml" podUID="a30ab676-362b-42ca-9eb0-8d42e4196078" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.306807 4922 patch_prober.go:28] interesting pod/downloads-7954f5f757-p6rml container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" start-of-body= Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.306874 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-p6rml" podUID="a30ab676-362b-42ca-9eb0-8d42e4196078" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.15:8080/\": dial tcp 10.217.0.15:8080: connect: connection refused" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.307641 4922 patch_prober.go:28] interesting pod/console-f9d7485db-fq7mw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.307668 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-fq7mw" podUID="ddedd179-84f4-4532-9d1b-eed45990a6e2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.449025 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.449737 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.452384 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.452563 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.452642 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.577004 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/447759c3-947c-4d27-9989-120f5358131e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.577333 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/447759c3-947c-4d27-9989-120f5358131e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.678721 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/447759c3-947c-4d27-9989-120f5358131e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.678801 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/447759c3-947c-4d27-9989-120f5358131e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.678989 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/447759c3-947c-4d27-9989-120f5358131e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.706984 4922 generic.go:334] "Generic (PLEG): container finished" podID="c6dd3806-28e6-47bc-9ddf-c59a48cc18e4" containerID="736e59b03895c9d1ca8757326aeec4e7c87ff27f1f7bdd7d47b216e5a842b6b0" exitCode=0 Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.707650 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4","Type":"ContainerDied","Data":"736e59b03895c9d1ca8757326aeec4e7c87ff27f1f7bdd7d47b216e5a842b6b0"} Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.713007 4922 generic.go:334] "Generic (PLEG): container finished" podID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerID="f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530" exitCode=0 Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.713075 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-lw9w7" event={"ID":"e5e8f04f-0814-4db4-8614-ce1180ed65ca","Type":"ContainerDied","Data":"f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530"} Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.713104 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lw9w7" event={"ID":"e5e8f04f-0814-4db4-8614-ce1180ed65ca","Type":"ContainerStarted","Data":"05a770eeb9f982e7856411703eadd8d9cccd33e68b848a8b5171ae59e520d907"} Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.716341 4922 generic.go:334] "Generic (PLEG): container finished" podID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerID="719e6c9968ce99c746377175154453274ce12819e5e16f322642e2f703e7b183" exitCode=0 Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.717294 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms7wt" event={"ID":"57144ff1-c845-4eee-a2fa-d69535c46ca2","Type":"ContainerDied","Data":"719e6c9968ce99c746377175154453274ce12819e5e16f322642e2f703e7b183"} Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.717318 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms7wt" event={"ID":"57144ff1-c845-4eee-a2fa-d69535c46ca2","Type":"ContainerStarted","Data":"c501d39f30e048f562592e29ae2b3519cefb2c60038a5e2cb47204ae2dbeab77"} Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.740273 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/447759c3-947c-4d27-9989-120f5358131e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:08 crc kubenswrapper[4922]: I0929 22:29:08.764929 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:09 crc kubenswrapper[4922]: I0929 22:29:09.047023 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:09 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:09 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:09 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:09 crc kubenswrapper[4922]: I0929 22:29:09.047080 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:09 crc kubenswrapper[4922]: I0929 22:29:09.138480 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 22:29:09 crc kubenswrapper[4922]: I0929 22:29:09.144181 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:29:09 crc kubenswrapper[4922]: I0929 22:29:09.730436 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"447759c3-947c-4d27-9989-120f5358131e","Type":"ContainerStarted","Data":"d349698261d611e5dd89fa236dad8f14daba0f17627d137c9b5c25e9bdc5c285"} Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.036889 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:10 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:10 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:10 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.037191 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.069569 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.200630 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kubelet-dir\") pod \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.200702 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kube-api-access\") pod \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\" (UID: \"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4\") " Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.200771 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c6dd3806-28e6-47bc-9ddf-c59a48cc18e4" (UID: "c6dd3806-28e6-47bc-9ddf-c59a48cc18e4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.201158 4922 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.218631 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c6dd3806-28e6-47bc-9ddf-c59a48cc18e4" (UID: "c6dd3806-28e6-47bc-9ddf-c59a48cc18e4"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.302658 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c6dd3806-28e6-47bc-9ddf-c59a48cc18e4-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.550180 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-6mvkp" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.745686 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c6dd3806-28e6-47bc-9ddf-c59a48cc18e4","Type":"ContainerDied","Data":"65b900723c413da86c7e8089702f07242924e92657f21c9d49b0df887d41e563"} Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.745723 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65b900723c413da86c7e8089702f07242924e92657f21c9d49b0df887d41e563" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.745724 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.750574 4922 generic.go:334] "Generic (PLEG): container finished" podID="447759c3-947c-4d27-9989-120f5358131e" containerID="53676d486c8008d195ae5426ac8e2b851bbd93d78afb05a565962b809b162bd1" exitCode=0 Sep 29 22:29:10 crc kubenswrapper[4922]: I0929 22:29:10.750614 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"447759c3-947c-4d27-9989-120f5358131e","Type":"ContainerDied","Data":"53676d486c8008d195ae5426ac8e2b851bbd93d78afb05a565962b809b162bd1"} Sep 29 22:29:11 crc kubenswrapper[4922]: I0929 22:29:11.036021 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:11 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:11 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:11 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:11 crc kubenswrapper[4922]: I0929 22:29:11.036078 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:12 crc kubenswrapper[4922]: I0929 22:29:12.036255 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:12 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:12 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:12 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:12 crc kubenswrapper[4922]: I0929 22:29:12.036673 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:13 crc kubenswrapper[4922]: I0929 22:29:13.036788 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:13 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:13 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:13 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:13 crc kubenswrapper[4922]: I0929 22:29:13.036849 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:14 crc kubenswrapper[4922]: I0929 22:29:14.036006 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:14 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 
22:29:14 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:14 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:14 crc kubenswrapper[4922]: I0929 22:29:14.036281 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:15 crc kubenswrapper[4922]: I0929 22:29:15.040598 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:15 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:15 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:15 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:15 crc kubenswrapper[4922]: I0929 22:29:15.040651 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:16 crc kubenswrapper[4922]: I0929 22:29:16.035589 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:16 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:16 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:16 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:16 crc kubenswrapper[4922]: I0929 22:29:16.035917 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:17 crc kubenswrapper[4922]: I0929 22:29:17.036331 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:17 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:17 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:17 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:17 crc kubenswrapper[4922]: I0929 22:29:17.036513 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:18 crc kubenswrapper[4922]: I0929 22:29:18.039261 4922 patch_prober.go:28] interesting pod/router-default-5444994796-2v5hs container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 22:29:18 crc kubenswrapper[4922]: [-]has-synced failed: reason withheld Sep 29 22:29:18 crc kubenswrapper[4922]: [+]process-running ok Sep 29 22:29:18 crc kubenswrapper[4922]: healthz check failed Sep 29 22:29:18 crc kubenswrapper[4922]: I0929 22:29:18.039342 4922 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-ingress/router-default-5444994796-2v5hs" podUID="2e5ac2f8-cc73-4c9d-88d2-22bd6db0348e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:29:18 crc kubenswrapper[4922]: I0929 22:29:18.300278 4922 patch_prober.go:28] interesting pod/console-f9d7485db-fq7mw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Sep 29 22:29:18 crc kubenswrapper[4922]: I0929 22:29:18.300812 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-fq7mw" podUID="ddedd179-84f4-4532-9d1b-eed45990a6e2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" Sep 29 22:29:18 crc kubenswrapper[4922]: I0929 22:29:18.317040 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-p6rml" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.037905 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.041649 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-2v5hs" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.791048 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.831715 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.831853 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"447759c3-947c-4d27-9989-120f5358131e","Type":"ContainerDied","Data":"d349698261d611e5dd89fa236dad8f14daba0f17627d137c9b5c25e9bdc5c285"} Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.831879 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d349698261d611e5dd89fa236dad8f14daba0f17627d137c9b5c25e9bdc5c285" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.844427 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/447759c3-947c-4d27-9989-120f5358131e-kubelet-dir\") pod \"447759c3-947c-4d27-9989-120f5358131e\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.844476 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/447759c3-947c-4d27-9989-120f5358131e-kube-api-access\") pod \"447759c3-947c-4d27-9989-120f5358131e\" (UID: \"447759c3-947c-4d27-9989-120f5358131e\") " Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.844574 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/447759c3-947c-4d27-9989-120f5358131e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "447759c3-947c-4d27-9989-120f5358131e" (UID: "447759c3-947c-4d27-9989-120f5358131e"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.844880 4922 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/447759c3-947c-4d27-9989-120f5358131e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.851730 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/447759c3-947c-4d27-9989-120f5358131e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "447759c3-947c-4d27-9989-120f5358131e" (UID: "447759c3-947c-4d27-9989-120f5358131e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:29:19 crc kubenswrapper[4922]: I0929 22:29:19.946372 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/447759c3-947c-4d27-9989-120f5358131e-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:20 crc kubenswrapper[4922]: I0929 22:29:20.352234 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:29:20 crc kubenswrapper[4922]: I0929 22:29:20.363383 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51c5d7b9-741c-448f-b19e-9441e62a48c6-metrics-certs\") pod \"network-metrics-daemon-gkfvg\" (UID: \"51c5d7b9-741c-448f-b19e-9441e62a48c6\") " pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:29:20 crc kubenswrapper[4922]: I0929 22:29:20.394076 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-gkfvg" Sep 29 22:29:26 crc kubenswrapper[4922]: I0929 22:29:26.136758 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:29:28 crc kubenswrapper[4922]: I0929 22:29:28.306372 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:29:28 crc kubenswrapper[4922]: I0929 22:29:28.312928 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:29:28 crc kubenswrapper[4922]: I0929 22:29:28.913127 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:29:28 crc kubenswrapper[4922]: I0929 22:29:28.913440 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:29:32 crc kubenswrapper[4922]: E0929 22:29:32.562222 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 22:29:32 crc kubenswrapper[4922]: E0929 22:29:32.562448 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8264q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-22bjg_openshift-marketplace(f8c0a27c-3686-4b44-8a91-45744aa8e551): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: 
copying config: context canceled" logger="UnhandledError" Sep 29 22:29:32 crc kubenswrapper[4922]: E0929 22:29:32.563609 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-22bjg" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" Sep 29 22:29:35 crc kubenswrapper[4922]: E0929 22:29:35.135041 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-22bjg" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" Sep 29 22:29:35 crc kubenswrapper[4922]: E0929 22:29:35.238022 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 22:29:35 crc kubenswrapper[4922]: E0929 22:29:35.238243 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6dlhv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-jd4wx_openshift-marketplace(86303f4e-f5e5-4779-b045-1f56a0ef1b32): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 22:29:35 crc kubenswrapper[4922]: E0929 22:29:35.239727 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-jd4wx" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" Sep 29 22:29:38 crc kubenswrapper[4922]: E0929 22:29:38.464600 4922 
log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 29 22:29:38 crc kubenswrapper[4922]: E0929 22:29:38.465109 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-js9j9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-ms7wt_openshift-marketplace(57144ff1-c845-4eee-a2fa-d69535c46ca2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 22:29:38 crc kubenswrapper[4922]: E0929 22:29:38.467550 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-ms7wt" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" Sep 29 22:29:38 crc kubenswrapper[4922]: I0929 22:29:38.782433 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vr4md" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.081583 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-jd4wx" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.081624 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-ms7wt" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" Sep 29 
22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.202986 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.203339 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r7qcf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ntnpl_openshift-marketplace(941561c9-c666-4f06-a30c-914d9db9ce9e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.206216 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-ntnpl" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.211080 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.211247 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-62qhl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-2bmls_openshift-marketplace(e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.212645 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-2bmls" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.261333 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.261513 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wz2lr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-lw9w7_openshift-marketplace(e5e8f04f-0814-4db4-8614-ce1180ed65ca): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.262763 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-lw9w7" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.280712 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.280848 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-74mcv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4k5qr_openshift-marketplace(ce910279-f40a-4b94-9be2-718aa508e1d1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.282572 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4k5qr" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.354588 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.354780 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pb2wl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-c7r4b_openshift-marketplace(a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.356467 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-c7r4b" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" Sep 29 22:29:39 crc kubenswrapper[4922]: I0929 22:29:39.510291 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-gkfvg"] Sep 29 22:29:39 crc kubenswrapper[4922]: W0929 22:29:39.517274 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51c5d7b9_741c_448f_b19e_9441e62a48c6.slice/crio-f068c3273f4d2911a45b762938b46ba278c3374a992875663fa06fe46b1c5bc7 WatchSource:0}: Error finding container f068c3273f4d2911a45b762938b46ba278c3374a992875663fa06fe46b1c5bc7: Status 404 returned error can't find the container with id f068c3273f4d2911a45b762938b46ba278c3374a992875663fa06fe46b1c5bc7 Sep 29 22:29:39 crc kubenswrapper[4922]: I0929 22:29:39.954883 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" event={"ID":"51c5d7b9-741c-448f-b19e-9441e62a48c6","Type":"ContainerStarted","Data":"fb56340dc4d262b4b1018cf833a431f5f9e50349a944720ddd40ba5882abe2f2"} Sep 29 22:29:39 crc kubenswrapper[4922]: I0929 22:29:39.955374 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" event={"ID":"51c5d7b9-741c-448f-b19e-9441e62a48c6","Type":"ContainerStarted","Data":"f068c3273f4d2911a45b762938b46ba278c3374a992875663fa06fe46b1c5bc7"} Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.956975 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-lw9w7" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.960603 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4k5qr" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.960660 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ntnpl" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.960779 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-c7r4b" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" Sep 29 22:29:39 crc kubenswrapper[4922]: E0929 22:29:39.961723 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-2bmls" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" Sep 29 22:29:40 crc kubenswrapper[4922]: I0929 22:29:40.962816 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-gkfvg" event={"ID":"51c5d7b9-741c-448f-b19e-9441e62a48c6","Type":"ContainerStarted","Data":"e30a2bb261de6d0cc34224b8806d8432179982f7c06d0cc159699626a5f0cf92"} Sep 29 22:29:40 crc kubenswrapper[4922]: I0929 22:29:40.993838 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-gkfvg" podStartSLOduration=163.993806224 podStartE2EDuration="2m43.993806224s" podCreationTimestamp="2025-09-29 22:26:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:29:40.989939826 +0000 UTC m=+185.300228669" watchObservedRunningTime="2025-09-29 22:29:40.993806224 +0000 UTC m=+185.304095077" Sep 29 22:29:44 crc kubenswrapper[4922]: I0929 22:29:44.490565 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 22:29:49 crc kubenswrapper[4922]: I0929 22:29:49.023163 4922 generic.go:334] "Generic (PLEG): container finished" podID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerID="09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f" exitCode=0 Sep 29 22:29:49 crc kubenswrapper[4922]: I0929 22:29:49.023247 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bjg" event={"ID":"f8c0a27c-3686-4b44-8a91-45744aa8e551","Type":"ContainerDied","Data":"09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f"} Sep 29 22:29:50 crc kubenswrapper[4922]: I0929 22:29:50.032356 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-22bjg" event={"ID":"f8c0a27c-3686-4b44-8a91-45744aa8e551","Type":"ContainerStarted","Data":"024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819"} Sep 29 22:29:50 crc kubenswrapper[4922]: I0929 22:29:50.068372 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-22bjg" podStartSLOduration=2.30366269 podStartE2EDuration="46.068342861s" podCreationTimestamp="2025-09-29 22:29:04 +0000 UTC" firstStartedPulling="2025-09-29 22:29:05.671648366 +0000 UTC m=+149.981937189" lastFinishedPulling="2025-09-29 22:29:49.436328517 +0000 UTC m=+193.746617360" observedRunningTime="2025-09-29 22:29:50.060874032 +0000 UTC m=+194.371162885" watchObservedRunningTime="2025-09-29 22:29:50.068342861 +0000 UTC m=+194.378631684" Sep 29 22:29:51 crc kubenswrapper[4922]: I0929 22:29:51.040524 4922 generic.go:334] "Generic (PLEG): container finished" podID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerID="da692710ffb0df2641f9a9ef63eb637da3ae72fa28481e702ec71429b586c929" exitCode=0 Sep 29 22:29:51 crc kubenswrapper[4922]: I0929 22:29:51.040572 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms7wt" event={"ID":"57144ff1-c845-4eee-a2fa-d69535c46ca2","Type":"ContainerDied","Data":"da692710ffb0df2641f9a9ef63eb637da3ae72fa28481e702ec71429b586c929"} Sep 29 22:29:52 crc kubenswrapper[4922]: I0929 22:29:52.047467 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms7wt" event={"ID":"57144ff1-c845-4eee-a2fa-d69535c46ca2","Type":"ContainerStarted","Data":"7660ea69b3a7f660d84532143f363089d14ec1b3f3c77b0985d34271576e2f8c"} Sep 29 22:29:52 crc kubenswrapper[4922]: I0929 22:29:52.068576 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ms7wt" podStartSLOduration=2.233473046 podStartE2EDuration="45.06855771s" podCreationTimestamp="2025-09-29 22:29:07 +0000 UTC" firstStartedPulling="2025-09-29 22:29:08.717942707 +0000 UTC m=+153.028231520" lastFinishedPulling="2025-09-29 22:29:51.553027331 +0000 UTC m=+195.863316184" observedRunningTime="2025-09-29 22:29:52.064783607 +0000 UTC m=+196.375072440" watchObservedRunningTime="2025-09-29 22:29:52.06855771 +0000 UTC m=+196.378846533" Sep 29 22:29:53 crc kubenswrapper[4922]: I0929 22:29:53.058741 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jd4wx" event={"ID":"86303f4e-f5e5-4779-b045-1f56a0ef1b32","Type":"ContainerStarted","Data":"2a18e34bcfd84a57a96abca7582095ffded82e1c8003d785ef7f4bb36d22d413"} Sep 29 22:29:53 crc kubenswrapper[4922]: I0929 22:29:53.060288 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k5qr" event={"ID":"ce910279-f40a-4b94-9be2-718aa508e1d1","Type":"ContainerStarted","Data":"83d623ea7944d4d3c29e399844237e6483ad411fd243c8b34ce5f7f24400ed91"} Sep 29 22:29:54 crc kubenswrapper[4922]: I0929 22:29:54.070055 4922 generic.go:334] "Generic (PLEG): container finished" podID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerID="83d623ea7944d4d3c29e399844237e6483ad411fd243c8b34ce5f7f24400ed91" exitCode=0 Sep 29 22:29:54 crc kubenswrapper[4922]: I0929 22:29:54.070230 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k5qr" 
event={"ID":"ce910279-f40a-4b94-9be2-718aa508e1d1","Type":"ContainerDied","Data":"83d623ea7944d4d3c29e399844237e6483ad411fd243c8b34ce5f7f24400ed91"} Sep 29 22:29:54 crc kubenswrapper[4922]: I0929 22:29:54.075195 4922 generic.go:334] "Generic (PLEG): container finished" podID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerID="2a18e34bcfd84a57a96abca7582095ffded82e1c8003d785ef7f4bb36d22d413" exitCode=0 Sep 29 22:29:54 crc kubenswrapper[4922]: I0929 22:29:54.075242 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jd4wx" event={"ID":"86303f4e-f5e5-4779-b045-1f56a0ef1b32","Type":"ContainerDied","Data":"2a18e34bcfd84a57a96abca7582095ffded82e1c8003d785ef7f4bb36d22d413"} Sep 29 22:29:54 crc kubenswrapper[4922]: I0929 22:29:54.690438 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:54 crc kubenswrapper[4922]: I0929 22:29:54.690535 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:54 crc kubenswrapper[4922]: I0929 22:29:54.879493 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:55 crc kubenswrapper[4922]: I0929 22:29:55.084917 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k5qr" event={"ID":"ce910279-f40a-4b94-9be2-718aa508e1d1","Type":"ContainerStarted","Data":"bee628bd9bf65205aac2edbdbd52bddd2078bff258b0a3041309904241a8f4e8"} Sep 29 22:29:55 crc kubenswrapper[4922]: I0929 22:29:55.094674 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jd4wx" event={"ID":"86303f4e-f5e5-4779-b045-1f56a0ef1b32","Type":"ContainerStarted","Data":"78afc93a0982466de84f9c1fbe6c1b5eca2a160fda731c67caba5ad95ffb9b72"} Sep 29 22:29:55 crc kubenswrapper[4922]: I0929 22:29:55.109186 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4k5qr" podStartSLOduration=3.204279306 podStartE2EDuration="52.109158116s" podCreationTimestamp="2025-09-29 22:29:03 +0000 UTC" firstStartedPulling="2025-09-29 22:29:05.66709479 +0000 UTC m=+149.977383603" lastFinishedPulling="2025-09-29 22:29:54.57197359 +0000 UTC m=+198.882262413" observedRunningTime="2025-09-29 22:29:55.10486882 +0000 UTC m=+199.415157653" watchObservedRunningTime="2025-09-29 22:29:55.109158116 +0000 UTC m=+199.419446959" Sep 29 22:29:55 crc kubenswrapper[4922]: I0929 22:29:55.127985 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jd4wx" podStartSLOduration=3.283406174 podStartE2EDuration="51.127968882s" podCreationTimestamp="2025-09-29 22:29:04 +0000 UTC" firstStartedPulling="2025-09-29 22:29:06.686437287 +0000 UTC m=+150.996726100" lastFinishedPulling="2025-09-29 22:29:54.530999995 +0000 UTC m=+198.841288808" observedRunningTime="2025-09-29 22:29:55.127802638 +0000 UTC m=+199.438091471" watchObservedRunningTime="2025-09-29 22:29:55.127968882 +0000 UTC m=+199.438257695" Sep 29 22:29:55 crc kubenswrapper[4922]: I0929 22:29:55.145635 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:56 crc kubenswrapper[4922]: I0929 22:29:56.100604 4922 generic.go:334] "Generic (PLEG): container finished" 
podID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerID="ae26d03ef3bb56eccaa8d581377cf9ad4407d96f0c7eb259151dbf92c8446f7e" exitCode=0 Sep 29 22:29:56 crc kubenswrapper[4922]: I0929 22:29:56.100629 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2bmls" event={"ID":"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91","Type":"ContainerDied","Data":"ae26d03ef3bb56eccaa8d581377cf9ad4407d96f0c7eb259151dbf92c8446f7e"} Sep 29 22:29:56 crc kubenswrapper[4922]: I0929 22:29:56.106507 4922 generic.go:334] "Generic (PLEG): container finished" podID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerID="fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3" exitCode=0 Sep 29 22:29:56 crc kubenswrapper[4922]: I0929 22:29:56.106582 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lw9w7" event={"ID":"e5e8f04f-0814-4db4-8614-ce1180ed65ca","Type":"ContainerDied","Data":"fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3"} Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.113962 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2bmls" event={"ID":"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91","Type":"ContainerStarted","Data":"6f67e839370dd2340215d2f090a74b878ba583bec594cf40b3e049f74600b659"} Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.115889 4922 generic.go:334] "Generic (PLEG): container finished" podID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerID="02e4ff22b0adc621debf63650401e013cabbd60382303bb5be91bcc6ddea11bf" exitCode=0 Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.115927 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c7r4b" event={"ID":"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976","Type":"ContainerDied","Data":"02e4ff22b0adc621debf63650401e013cabbd60382303bb5be91bcc6ddea11bf"} Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.118763 4922 generic.go:334] "Generic (PLEG): container finished" podID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerID="c29e727bdd2d2dc473ab30a45d9788b7dd370c3f26f9bc56919d4e1999e2d27a" exitCode=0 Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.118946 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ntnpl" event={"ID":"941561c9-c666-4f06-a30c-914d9db9ce9e","Type":"ContainerDied","Data":"c29e727bdd2d2dc473ab30a45d9788b7dd370c3f26f9bc56919d4e1999e2d27a"} Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.121031 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lw9w7" event={"ID":"e5e8f04f-0814-4db4-8614-ce1180ed65ca","Type":"ContainerStarted","Data":"a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974"} Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.137465 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2bmls" podStartSLOduration=3.201183412 podStartE2EDuration="52.137449126s" podCreationTimestamp="2025-09-29 22:29:05 +0000 UTC" firstStartedPulling="2025-09-29 22:29:07.702058319 +0000 UTC m=+152.012347132" lastFinishedPulling="2025-09-29 22:29:56.638324033 +0000 UTC m=+200.948612846" observedRunningTime="2025-09-29 22:29:57.135761334 +0000 UTC m=+201.446050147" watchObservedRunningTime="2025-09-29 22:29:57.137449126 +0000 UTC m=+201.447737939" Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.187596 4922 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lw9w7" podStartSLOduration=3.328906149 podStartE2EDuration="51.187582178s" podCreationTimestamp="2025-09-29 22:29:06 +0000 UTC" firstStartedPulling="2025-09-29 22:29:08.715350941 +0000 UTC m=+153.025639754" lastFinishedPulling="2025-09-29 22:29:56.57402697 +0000 UTC m=+200.884315783" observedRunningTime="2025-09-29 22:29:57.172069444 +0000 UTC m=+201.482358257" watchObservedRunningTime="2025-09-29 22:29:57.187582178 +0000 UTC m=+201.497870991" Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.303118 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.303163 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.708301 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.709071 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:57 crc kubenswrapper[4922]: I0929 22:29:57.771354 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.127049 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c7r4b" event={"ID":"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976","Type":"ContainerStarted","Data":"9aea3635edb0e2911cb0dd5db6248f529915ea256bf8f487e5b2d4473b443d76"} Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.132992 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ntnpl" event={"ID":"941561c9-c666-4f06-a30c-914d9db9ce9e","Type":"ContainerStarted","Data":"29d6a3d97a43beb5bf2424d5dd2d7e164a353da451ecc82c136d1efd12be3991"} Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.153381 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c7r4b" podStartSLOduration=3.178422512 podStartE2EDuration="55.15335772s" podCreationTimestamp="2025-09-29 22:29:03 +0000 UTC" firstStartedPulling="2025-09-29 22:29:05.659733244 +0000 UTC m=+149.970022097" lastFinishedPulling="2025-09-29 22:29:57.634668492 +0000 UTC m=+201.944957305" observedRunningTime="2025-09-29 22:29:58.151929075 +0000 UTC m=+202.462217888" watchObservedRunningTime="2025-09-29 22:29:58.15335772 +0000 UTC m=+202.463646533" Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.174379 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ntnpl" podStartSLOduration=2.343634371 podStartE2EDuration="52.174363341s" podCreationTimestamp="2025-09-29 22:29:06 +0000 UTC" firstStartedPulling="2025-09-29 22:29:07.702055089 +0000 UTC m=+152.012343902" lastFinishedPulling="2025-09-29 22:29:57.532784019 +0000 UTC m=+201.843072872" observedRunningTime="2025-09-29 22:29:58.17150595 +0000 UTC m=+202.481794753" watchObservedRunningTime="2025-09-29 22:29:58.174363341 +0000 UTC m=+202.484652154" Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.200440 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.358580 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lw9w7" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="registry-server" probeResult="failure" output=< Sep 29 22:29:58 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 29 22:29:58 crc kubenswrapper[4922]: > Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.672737 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-22bjg"] Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.673082 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-22bjg" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="registry-server" containerID="cri-o://024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819" gracePeriod=2 Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.913766 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.913865 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.913946 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.914933 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:29:58 crc kubenswrapper[4922]: I0929 22:29:58.915142 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59" gracePeriod=600 Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.014935 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.086429 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-utilities\") pod \"f8c0a27c-3686-4b44-8a91-45744aa8e551\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.086717 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8264q\" (UniqueName: \"kubernetes.io/projected/f8c0a27c-3686-4b44-8a91-45744aa8e551-kube-api-access-8264q\") pod \"f8c0a27c-3686-4b44-8a91-45744aa8e551\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.086739 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-catalog-content\") pod \"f8c0a27c-3686-4b44-8a91-45744aa8e551\" (UID: \"f8c0a27c-3686-4b44-8a91-45744aa8e551\") " Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.087223 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-utilities" (OuterVolumeSpecName: "utilities") pod "f8c0a27c-3686-4b44-8a91-45744aa8e551" (UID: "f8c0a27c-3686-4b44-8a91-45744aa8e551"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.092110 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8c0a27c-3686-4b44-8a91-45744aa8e551-kube-api-access-8264q" (OuterVolumeSpecName: "kube-api-access-8264q") pod "f8c0a27c-3686-4b44-8a91-45744aa8e551" (UID: "f8c0a27c-3686-4b44-8a91-45744aa8e551"). InnerVolumeSpecName "kube-api-access-8264q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.135919 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f8c0a27c-3686-4b44-8a91-45744aa8e551" (UID: "f8c0a27c-3686-4b44-8a91-45744aa8e551"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.146950 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59" exitCode=0 Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.147020 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59"} Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.148600 4922 generic.go:334] "Generic (PLEG): container finished" podID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerID="024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819" exitCode=0 Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.148639 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22bjg" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.148724 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bjg" event={"ID":"f8c0a27c-3686-4b44-8a91-45744aa8e551","Type":"ContainerDied","Data":"024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819"} Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.148753 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22bjg" event={"ID":"f8c0a27c-3686-4b44-8a91-45744aa8e551","Type":"ContainerDied","Data":"ab7f5e67c3ed1feed7dba40b74a5182b97af18f6c43454cdd54c98a9c060c878"} Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.148772 4922 scope.go:117] "RemoveContainer" containerID="024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.163255 4922 scope.go:117] "RemoveContainer" containerID="09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.180016 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-22bjg"] Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.182347 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-22bjg"] Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.187778 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.187801 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8264q\" (UniqueName: \"kubernetes.io/projected/f8c0a27c-3686-4b44-8a91-45744aa8e551-kube-api-access-8264q\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.187810 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8c0a27c-3686-4b44-8a91-45744aa8e551-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.196122 4922 scope.go:117] "RemoveContainer" containerID="a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.209253 4922 scope.go:117] "RemoveContainer" containerID="024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819" Sep 29 22:29:59 crc kubenswrapper[4922]: E0929 22:29:59.209698 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819\": container with ID starting with 024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819 not found: ID does not exist" containerID="024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.209737 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819"} err="failed to get container status \"024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819\": rpc error: code = NotFound desc = could not find container \"024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819\": container with ID starting 
with 024cf61336c56ccd7dacbfc29ce65b6a9d74e4f5666bd3b27a0165d52c30c819 not found: ID does not exist" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.209758 4922 scope.go:117] "RemoveContainer" containerID="09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f" Sep 29 22:29:59 crc kubenswrapper[4922]: E0929 22:29:59.209948 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f\": container with ID starting with 09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f not found: ID does not exist" containerID="09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.209969 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f"} err="failed to get container status \"09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f\": rpc error: code = NotFound desc = could not find container \"09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f\": container with ID starting with 09492461c68f86df9b30ec82f2715e4dbd14e755d74337ca742485f1131a905f not found: ID does not exist" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.209983 4922 scope.go:117] "RemoveContainer" containerID="a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957" Sep 29 22:29:59 crc kubenswrapper[4922]: E0929 22:29:59.210184 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957\": container with ID starting with a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957 not found: ID does not exist" containerID="a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.210210 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957"} err="failed to get container status \"a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957\": rpc error: code = NotFound desc = could not find container \"a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957\": container with ID starting with a292d810e69ba0d5c4847c45c4482d372ee860c2cf26ca367189b00589b75957 not found: ID does not exist" Sep 29 22:29:59 crc kubenswrapper[4922]: I0929 22:29:59.855908 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-299nl"] Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.129699 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4"] Sep 29 22:30:00 crc kubenswrapper[4922]: E0929 22:30:00.130178 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="447759c3-947c-4d27-9989-120f5358131e" containerName="pruner" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130189 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="447759c3-947c-4d27-9989-120f5358131e" containerName="pruner" Sep 29 22:30:00 crc kubenswrapper[4922]: E0929 22:30:00.130203 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="extract-utilities" Sep 29 22:30:00 crc 
kubenswrapper[4922]: I0929 22:30:00.130209 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="extract-utilities" Sep 29 22:30:00 crc kubenswrapper[4922]: E0929 22:30:00.130223 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6dd3806-28e6-47bc-9ddf-c59a48cc18e4" containerName="pruner" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130229 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6dd3806-28e6-47bc-9ddf-c59a48cc18e4" containerName="pruner" Sep 29 22:30:00 crc kubenswrapper[4922]: E0929 22:30:00.130237 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="registry-server" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130243 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="registry-server" Sep 29 22:30:00 crc kubenswrapper[4922]: E0929 22:30:00.130254 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="extract-content" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130259 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="extract-content" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130370 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6dd3806-28e6-47bc-9ddf-c59a48cc18e4" containerName="pruner" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130402 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="447759c3-947c-4d27-9989-120f5358131e" containerName="pruner" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130416 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" containerName="registry-server" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.130757 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.135255 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.135733 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4"] Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.136963 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.155605 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"accabd2cf4dc0ea91bb223aa4ccdc7ab29b4040f94afa9cb51a973916e1e42e0"} Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.198179 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f84b5d8-c622-4a85-b8da-97861b7ede3f-config-volume\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.198502 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmlkg\" (UniqueName: \"kubernetes.io/projected/8f84b5d8-c622-4a85-b8da-97861b7ede3f-kube-api-access-pmlkg\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.198646 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f84b5d8-c622-4a85-b8da-97861b7ede3f-secret-volume\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.299896 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f84b5d8-c622-4a85-b8da-97861b7ede3f-config-volume\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.299999 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmlkg\" (UniqueName: \"kubernetes.io/projected/8f84b5d8-c622-4a85-b8da-97861b7ede3f-kube-api-access-pmlkg\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.300038 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f84b5d8-c622-4a85-b8da-97861b7ede3f-secret-volume\") pod \"collect-profiles-29319750-gtdv4\" (UID: 
\"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.301515 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f84b5d8-c622-4a85-b8da-97861b7ede3f-config-volume\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.309629 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f84b5d8-c622-4a85-b8da-97861b7ede3f-secret-volume\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.323372 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmlkg\" (UniqueName: \"kubernetes.io/projected/8f84b5d8-c622-4a85-b8da-97861b7ede3f-kube-api-access-pmlkg\") pod \"collect-profiles-29319750-gtdv4\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.430163 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8c0a27c-3686-4b44-8a91-45744aa8e551" path="/var/lib/kubelet/pods/f8c0a27c-3686-4b44-8a91-45744aa8e551/volumes" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.444269 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:00 crc kubenswrapper[4922]: I0929 22:30:00.673827 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4"] Sep 29 22:30:01 crc kubenswrapper[4922]: I0929 22:30:01.162818 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" event={"ID":"8f84b5d8-c622-4a85-b8da-97861b7ede3f","Type":"ContainerStarted","Data":"67c860dfa088042e653b37a25aacb1e73181cbea31d55e2351e92060acdc4b8e"} Sep 29 22:30:02 crc kubenswrapper[4922]: I0929 22:30:02.072192 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ms7wt"] Sep 29 22:30:02 crc kubenswrapper[4922]: I0929 22:30:02.072887 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ms7wt" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="registry-server" containerID="cri-o://7660ea69b3a7f660d84532143f363089d14ec1b3f3c77b0985d34271576e2f8c" gracePeriod=2 Sep 29 22:30:02 crc kubenswrapper[4922]: I0929 22:30:02.170460 4922 generic.go:334] "Generic (PLEG): container finished" podID="8f84b5d8-c622-4a85-b8da-97861b7ede3f" containerID="dde905e519db2042b26f09f3a5db1b2366fb264932c3902c5bbadc4a1c1712de" exitCode=0 Sep 29 22:30:02 crc kubenswrapper[4922]: I0929 22:30:02.170962 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" event={"ID":"8f84b5d8-c622-4a85-b8da-97861b7ede3f","Type":"ContainerDied","Data":"dde905e519db2042b26f09f3a5db1b2366fb264932c3902c5bbadc4a1c1712de"} Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 
22:30:03.178686 4922 generic.go:334] "Generic (PLEG): container finished" podID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerID="7660ea69b3a7f660d84532143f363089d14ec1b3f3c77b0985d34271576e2f8c" exitCode=0 Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.178771 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms7wt" event={"ID":"57144ff1-c845-4eee-a2fa-d69535c46ca2","Type":"ContainerDied","Data":"7660ea69b3a7f660d84532143f363089d14ec1b3f3c77b0985d34271576e2f8c"} Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.178988 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ms7wt" event={"ID":"57144ff1-c845-4eee-a2fa-d69535c46ca2","Type":"ContainerDied","Data":"c501d39f30e048f562592e29ae2b3519cefb2c60038a5e2cb47204ae2dbeab77"} Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.179016 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c501d39f30e048f562592e29ae2b3519cefb2c60038a5e2cb47204ae2dbeab77" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.182989 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.234072 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-catalog-content\") pod \"57144ff1-c845-4eee-a2fa-d69535c46ca2\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.234147 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-utilities\") pod \"57144ff1-c845-4eee-a2fa-d69535c46ca2\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.234240 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js9j9\" (UniqueName: \"kubernetes.io/projected/57144ff1-c845-4eee-a2fa-d69535c46ca2-kube-api-access-js9j9\") pod \"57144ff1-c845-4eee-a2fa-d69535c46ca2\" (UID: \"57144ff1-c845-4eee-a2fa-d69535c46ca2\") " Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.234976 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-utilities" (OuterVolumeSpecName: "utilities") pod "57144ff1-c845-4eee-a2fa-d69535c46ca2" (UID: "57144ff1-c845-4eee-a2fa-d69535c46ca2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.243261 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57144ff1-c845-4eee-a2fa-d69535c46ca2-kube-api-access-js9j9" (OuterVolumeSpecName: "kube-api-access-js9j9") pod "57144ff1-c845-4eee-a2fa-d69535c46ca2" (UID: "57144ff1-c845-4eee-a2fa-d69535c46ca2"). InnerVolumeSpecName "kube-api-access-js9j9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.336125 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.336153 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js9j9\" (UniqueName: \"kubernetes.io/projected/57144ff1-c845-4eee-a2fa-d69535c46ca2-kube-api-access-js9j9\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.388580 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.436865 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f84b5d8-c622-4a85-b8da-97861b7ede3f-secret-volume\") pod \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.437370 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f84b5d8-c622-4a85-b8da-97861b7ede3f-config-volume\") pod \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.437510 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmlkg\" (UniqueName: \"kubernetes.io/projected/8f84b5d8-c622-4a85-b8da-97861b7ede3f-kube-api-access-pmlkg\") pod \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\" (UID: \"8f84b5d8-c622-4a85-b8da-97861b7ede3f\") " Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.437902 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f84b5d8-c622-4a85-b8da-97861b7ede3f-config-volume" (OuterVolumeSpecName: "config-volume") pod "8f84b5d8-c622-4a85-b8da-97861b7ede3f" (UID: "8f84b5d8-c622-4a85-b8da-97861b7ede3f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.444563 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f84b5d8-c622-4a85-b8da-97861b7ede3f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8f84b5d8-c622-4a85-b8da-97861b7ede3f" (UID: "8f84b5d8-c622-4a85-b8da-97861b7ede3f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.444619 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f84b5d8-c622-4a85-b8da-97861b7ede3f-kube-api-access-pmlkg" (OuterVolumeSpecName: "kube-api-access-pmlkg") pod "8f84b5d8-c622-4a85-b8da-97861b7ede3f" (UID: "8f84b5d8-c622-4a85-b8da-97861b7ede3f"). InnerVolumeSpecName "kube-api-access-pmlkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.538549 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmlkg\" (UniqueName: \"kubernetes.io/projected/8f84b5d8-c622-4a85-b8da-97861b7ede3f-kube-api-access-pmlkg\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.538594 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8f84b5d8-c622-4a85-b8da-97861b7ede3f-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:03 crc kubenswrapper[4922]: I0929 22:30:03.538608 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8f84b5d8-c622-4a85-b8da-97861b7ede3f-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.166790 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.167148 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.186242 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" event={"ID":"8f84b5d8-c622-4a85-b8da-97861b7ede3f","Type":"ContainerDied","Data":"67c860dfa088042e653b37a25aacb1e73181cbea31d55e2351e92060acdc4b8e"} Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.186314 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67c860dfa088042e653b37a25aacb1e73181cbea31d55e2351e92060acdc4b8e" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.186269 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ms7wt" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.186269 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.214355 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.265163 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.305972 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.306014 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.344253 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.468626 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57144ff1-c845-4eee-a2fa-d69535c46ca2" (UID: "57144ff1-c845-4eee-a2fa-d69535c46ca2"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.515377 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.516440 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.519461 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ms7wt"] Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.522906 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ms7wt"] Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.549180 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57144ff1-c845-4eee-a2fa-d69535c46ca2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:04 crc kubenswrapper[4922]: I0929 22:30:04.564309 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:30:05 crc kubenswrapper[4922]: I0929 22:30:05.228062 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:30:05 crc kubenswrapper[4922]: I0929 22:30:05.247507 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.304604 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.304868 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.345842 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.432477 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" path="/var/lib/kubelet/pods/57144ff1-c845-4eee-a2fa-d69535c46ca2/volumes" Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.470805 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jd4wx"] Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.692314 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.692385 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:30:06 crc kubenswrapper[4922]: I0929 22:30:06.726871 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:30:07 crc kubenswrapper[4922]: I0929 22:30:07.235022 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:30:07 crc kubenswrapper[4922]: I0929 22:30:07.245196 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:30:07 crc kubenswrapper[4922]: I0929 22:30:07.346475 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:30:07 crc kubenswrapper[4922]: I0929 22:30:07.384946 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:30:08 crc kubenswrapper[4922]: I0929 22:30:08.203418 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jd4wx" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="registry-server" containerID="cri-o://78afc93a0982466de84f9c1fbe6c1b5eca2a160fda731c67caba5ad95ffb9b72" gracePeriod=2 Sep 29 22:30:08 crc kubenswrapper[4922]: E0929 22:30:08.531661 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86303f4e_f5e5_4779_b045_1f56a0ef1b32.slice/crio-78afc93a0982466de84f9c1fbe6c1b5eca2a160fda731c67caba5ad95ffb9b72.scope\": RecentStats: unable to find data in memory cache]" Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.070850 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ntnpl"] Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.210121 4922 generic.go:334] "Generic (PLEG): container finished" podID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerID="78afc93a0982466de84f9c1fbe6c1b5eca2a160fda731c67caba5ad95ffb9b72" exitCode=0 Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.210198 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jd4wx" event={"ID":"86303f4e-f5e5-4779-b045-1f56a0ef1b32","Type":"ContainerDied","Data":"78afc93a0982466de84f9c1fbe6c1b5eca2a160fda731c67caba5ad95ffb9b72"} Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.210382 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ntnpl" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="registry-server" containerID="cri-o://29d6a3d97a43beb5bf2424d5dd2d7e164a353da451ecc82c136d1efd12be3991" gracePeriod=2 Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.410230 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.508102 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-catalog-content\") pod \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.508164 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dlhv\" (UniqueName: \"kubernetes.io/projected/86303f4e-f5e5-4779-b045-1f56a0ef1b32-kube-api-access-6dlhv\") pod \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.508191 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-utilities\") pod \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\" (UID: \"86303f4e-f5e5-4779-b045-1f56a0ef1b32\") " Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.509110 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-utilities" (OuterVolumeSpecName: "utilities") pod "86303f4e-f5e5-4779-b045-1f56a0ef1b32" (UID: "86303f4e-f5e5-4779-b045-1f56a0ef1b32"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.513782 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86303f4e-f5e5-4779-b045-1f56a0ef1b32-kube-api-access-6dlhv" (OuterVolumeSpecName: "kube-api-access-6dlhv") pod "86303f4e-f5e5-4779-b045-1f56a0ef1b32" (UID: "86303f4e-f5e5-4779-b045-1f56a0ef1b32"). InnerVolumeSpecName "kube-api-access-6dlhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.555583 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86303f4e-f5e5-4779-b045-1f56a0ef1b32" (UID: "86303f4e-f5e5-4779-b045-1f56a0ef1b32"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.609925 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.610158 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dlhv\" (UniqueName: \"kubernetes.io/projected/86303f4e-f5e5-4779-b045-1f56a0ef1b32-kube-api-access-6dlhv\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:09 crc kubenswrapper[4922]: I0929 22:30:09.610223 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86303f4e-f5e5-4779-b045-1f56a0ef1b32-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.218445 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jd4wx" event={"ID":"86303f4e-f5e5-4779-b045-1f56a0ef1b32","Type":"ContainerDied","Data":"5f643a88f892305bd9a356e14aa465d67206710f3091c6e0a7dbc22b6cbb40d7"} Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.218482 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jd4wx" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.218853 4922 scope.go:117] "RemoveContainer" containerID="78afc93a0982466de84f9c1fbe6c1b5eca2a160fda731c67caba5ad95ffb9b72" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.221508 4922 generic.go:334] "Generic (PLEG): container finished" podID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerID="29d6a3d97a43beb5bf2424d5dd2d7e164a353da451ecc82c136d1efd12be3991" exitCode=0 Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.221660 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ntnpl" event={"ID":"941561c9-c666-4f06-a30c-914d9db9ce9e","Type":"ContainerDied","Data":"29d6a3d97a43beb5bf2424d5dd2d7e164a353da451ecc82c136d1efd12be3991"} Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.236569 4922 scope.go:117] "RemoveContainer" containerID="2a18e34bcfd84a57a96abca7582095ffded82e1c8003d785ef7f4bb36d22d413" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.244406 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jd4wx"] Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.256359 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jd4wx"] Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.277453 4922 scope.go:117] "RemoveContainer" containerID="7a9df367393bfe5d6072140c4aba458d4432409685a77a751496ae8455c8c324" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.427952 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" path="/var/lib/kubelet/pods/86303f4e-f5e5-4779-b045-1f56a0ef1b32/volumes" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.611836 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.727271 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7qcf\" (UniqueName: \"kubernetes.io/projected/941561c9-c666-4f06-a30c-914d9db9ce9e-kube-api-access-r7qcf\") pod \"941561c9-c666-4f06-a30c-914d9db9ce9e\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.727354 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-utilities\") pod \"941561c9-c666-4f06-a30c-914d9db9ce9e\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.727416 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-catalog-content\") pod \"941561c9-c666-4f06-a30c-914d9db9ce9e\" (UID: \"941561c9-c666-4f06-a30c-914d9db9ce9e\") " Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.728042 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-utilities" (OuterVolumeSpecName: "utilities") pod "941561c9-c666-4f06-a30c-914d9db9ce9e" (UID: "941561c9-c666-4f06-a30c-914d9db9ce9e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.730486 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/941561c9-c666-4f06-a30c-914d9db9ce9e-kube-api-access-r7qcf" (OuterVolumeSpecName: "kube-api-access-r7qcf") pod "941561c9-c666-4f06-a30c-914d9db9ce9e" (UID: "941561c9-c666-4f06-a30c-914d9db9ce9e"). InnerVolumeSpecName "kube-api-access-r7qcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.747456 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "941561c9-c666-4f06-a30c-914d9db9ce9e" (UID: "941561c9-c666-4f06-a30c-914d9db9ce9e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.828948 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7qcf\" (UniqueName: \"kubernetes.io/projected/941561c9-c666-4f06-a30c-914d9db9ce9e-kube-api-access-r7qcf\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.828980 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:10 crc kubenswrapper[4922]: I0929 22:30:10.828991 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/941561c9-c666-4f06-a30c-914d9db9ce9e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:11 crc kubenswrapper[4922]: I0929 22:30:11.227562 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ntnpl" event={"ID":"941561c9-c666-4f06-a30c-914d9db9ce9e","Type":"ContainerDied","Data":"8e2d583530fcb82740764cd75ca955c263f85578cee574d827b5373be6ffdad7"} Sep 29 22:30:11 crc kubenswrapper[4922]: I0929 22:30:11.227609 4922 scope.go:117] "RemoveContainer" containerID="29d6a3d97a43beb5bf2424d5dd2d7e164a353da451ecc82c136d1efd12be3991" Sep 29 22:30:11 crc kubenswrapper[4922]: I0929 22:30:11.227682 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ntnpl" Sep 29 22:30:11 crc kubenswrapper[4922]: I0929 22:30:11.247854 4922 scope.go:117] "RemoveContainer" containerID="c29e727bdd2d2dc473ab30a45d9788b7dd370c3f26f9bc56919d4e1999e2d27a" Sep 29 22:30:11 crc kubenswrapper[4922]: I0929 22:30:11.253665 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ntnpl"] Sep 29 22:30:11 crc kubenswrapper[4922]: I0929 22:30:11.268800 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ntnpl"] Sep 29 22:30:11 crc kubenswrapper[4922]: I0929 22:30:11.272180 4922 scope.go:117] "RemoveContainer" containerID="00d55fa62e739d923f0fad0ea071b288ff87291c71abb2a02117f765c0abf1a2" Sep 29 22:30:12 crc kubenswrapper[4922]: I0929 22:30:12.429198 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" path="/var/lib/kubelet/pods/941561c9-c666-4f06-a30c-914d9db9ce9e/volumes" Sep 29 22:30:24 crc kubenswrapper[4922]: I0929 22:30:24.894497 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" podUID="d68157f0-f55b-45bf-8288-6d0bd26f84de" containerName="oauth-openshift" containerID="cri-o://07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5" gracePeriod=15 Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.241649 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313267 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-idp-0-file-data\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313318 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-session\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313342 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-error\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313362 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-service-ca\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313399 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-policies\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313418 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-login\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313451 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-provider-selection\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313469 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-router-certs\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313520 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-dir\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 
22:30:25.313542 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-trusted-ca-bundle\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313591 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-cliconfig\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313624 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4j5qr\" (UniqueName: \"kubernetes.io/projected/d68157f0-f55b-45bf-8288-6d0bd26f84de-kube-api-access-4j5qr\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313663 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-serving-cert\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.313680 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-ocp-branding-template\") pod \"d68157f0-f55b-45bf-8288-6d0bd26f84de\" (UID: \"d68157f0-f55b-45bf-8288-6d0bd26f84de\") " Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.314838 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.314861 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.314877 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.314916 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.315337 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.316865 4922 generic.go:334] "Generic (PLEG): container finished" podID="d68157f0-f55b-45bf-8288-6d0bd26f84de" containerID="07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5" exitCode=0 Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.316921 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.316918 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" event={"ID":"d68157f0-f55b-45bf-8288-6d0bd26f84de","Type":"ContainerDied","Data":"07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5"} Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.317530 4922 scope.go:117] "RemoveContainer" containerID="07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.317058 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-299nl" event={"ID":"d68157f0-f55b-45bf-8288-6d0bd26f84de","Type":"ContainerDied","Data":"03cdb0d6cc843ac4e298e670819bdc47aebf8a32403546ff3211db37232c5baf"} Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.320652 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.321344 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.321494 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d68157f0-f55b-45bf-8288-6d0bd26f84de-kube-api-access-4j5qr" (OuterVolumeSpecName: "kube-api-access-4j5qr") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "kube-api-access-4j5qr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.322378 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.323444 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.323803 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.325197 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.325644 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.330126 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "d68157f0-f55b-45bf-8288-6d0bd26f84de" (UID: "d68157f0-f55b-45bf-8288-6d0bd26f84de"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.365425 4922 scope.go:117] "RemoveContainer" containerID="07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5" Sep 29 22:30:25 crc kubenswrapper[4922]: E0929 22:30:25.365954 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5\": container with ID starting with 07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5 not found: ID does not exist" containerID="07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.365983 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5"} err="failed to get container status \"07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5\": rpc error: code = NotFound desc = could not find container \"07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5\": container with ID starting with 07017be0e8c96fb2bcd8d6653625ea049b7939ddf6e2d41ec281ce583e54f4c5 not found: ID does not exist" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414803 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4j5qr\" (UniqueName: \"kubernetes.io/projected/d68157f0-f55b-45bf-8288-6d0bd26f84de-kube-api-access-4j5qr\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414844 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414861 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414876 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414888 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414903 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414914 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414926 4922 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: 
\"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414942 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414955 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414971 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414983 4922 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d68157f0-f55b-45bf-8288-6d0bd26f84de-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.414995 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.415009 4922 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d68157f0-f55b-45bf-8288-6d0bd26f84de-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.644283 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-299nl"] Sep 29 22:30:25 crc kubenswrapper[4922]: I0929 22:30:25.650468 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-299nl"] Sep 29 22:30:26 crc kubenswrapper[4922]: I0929 22:30:26.434944 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d68157f0-f55b-45bf-8288-6d0bd26f84de" path="/var/lib/kubelet/pods/d68157f0-f55b-45bf-8288-6d0bd26f84de/volumes" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.375663 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-c8559d799-rxg46"] Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376036 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376066 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376093 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="extract-utilities" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376109 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="extract-utilities" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 
22:30:28.376129 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="extract-content" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376143 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="extract-content" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376168 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="extract-content" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376183 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="extract-content" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376204 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f84b5d8-c622-4a85-b8da-97861b7ede3f" containerName="collect-profiles" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376217 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f84b5d8-c622-4a85-b8da-97861b7ede3f" containerName="collect-profiles" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376239 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d68157f0-f55b-45bf-8288-6d0bd26f84de" containerName="oauth-openshift" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376253 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d68157f0-f55b-45bf-8288-6d0bd26f84de" containerName="oauth-openshift" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376275 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376290 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376304 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="extract-content" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376318 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="extract-content" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376339 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="extract-utilities" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376354 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="extract-utilities" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376375 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="extract-utilities" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376424 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="extract-utilities" Sep 29 22:30:28 crc kubenswrapper[4922]: E0929 22:30:28.376445 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376461 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="registry-server" Sep 29 22:30:28 
crc kubenswrapper[4922]: I0929 22:30:28.376675 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="941561c9-c666-4f06-a30c-914d9db9ce9e" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376705 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d68157f0-f55b-45bf-8288-6d0bd26f84de" containerName="oauth-openshift" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376727 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="57144ff1-c845-4eee-a2fa-d69535c46ca2" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376749 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="86303f4e-f5e5-4779-b045-1f56a0ef1b32" containerName="registry-server" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.376934 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f84b5d8-c622-4a85-b8da-97861b7ede3f" containerName="collect-profiles" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.378573 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.409665 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.409891 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.411547 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.411853 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.411966 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.412194 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.412458 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.412606 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.412809 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.413680 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.413682 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.413881 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.424723 4922 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.425415 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.428531 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.434865 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c8559d799-rxg46"] Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.567626 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.568069 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-login\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.568490 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.568823 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-service-ca\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569142 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-audit-policies\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569205 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-audit-dir\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569344 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569426 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-router-certs\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569482 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569545 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-session\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569591 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569679 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjszh\" (UniqueName: \"kubernetes.io/projected/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-kube-api-access-wjszh\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569846 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-error\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.569901 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc 
kubenswrapper[4922]: I0929 22:30:28.671876 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-service-ca\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.671952 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-audit-policies\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.671988 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-audit-dir\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672048 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672088 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-router-certs\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672135 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672174 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-session\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672218 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672279 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjszh\" (UniqueName: \"kubernetes.io/projected/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-kube-api-access-wjszh\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672327 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-error\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672377 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672484 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672558 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-login\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.672600 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.673720 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-audit-policies\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.674434 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-audit-dir\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.674934 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.675081 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.675113 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-service-ca\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.683003 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.690421 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.691295 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-error\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.694251 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-router-certs\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.696877 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-session\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.697378 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.698487 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.703621 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-v4-0-config-user-template-login\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.707436 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjszh\" (UniqueName: \"kubernetes.io/projected/aaa37c92-9447-4c9c-b0ee-37a6a6da10b3-kube-api-access-wjszh\") pod \"oauth-openshift-c8559d799-rxg46\" (UID: \"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3\") " pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:28 crc kubenswrapper[4922]: I0929 22:30:28.716179 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:29 crc kubenswrapper[4922]: I0929 22:30:29.186957 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c8559d799-rxg46"] Sep 29 22:30:29 crc kubenswrapper[4922]: I0929 22:30:29.353896 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" event={"ID":"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3","Type":"ContainerStarted","Data":"8007b9cae02c88b730cde817a5af91c14bf1dce98e3f5a0e5829fe333870478a"} Sep 29 22:30:30 crc kubenswrapper[4922]: I0929 22:30:30.362748 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" event={"ID":"aaa37c92-9447-4c9c-b0ee-37a6a6da10b3","Type":"ContainerStarted","Data":"99f2276feb842f7e67b2e6d9b5226e4b4daa6b32556bf2e90e04320f11796b1b"} Sep 29 22:30:30 crc kubenswrapper[4922]: I0929 22:30:30.363140 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:30 crc kubenswrapper[4922]: I0929 22:30:30.369592 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" Sep 29 22:30:30 crc kubenswrapper[4922]: I0929 22:30:30.390314 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-c8559d799-rxg46" podStartSLOduration=31.390292504 podStartE2EDuration="31.390292504s" podCreationTimestamp="2025-09-29 22:29:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:30:30.388361496 +0000 UTC m=+234.698650409" watchObservedRunningTime="2025-09-29 
22:30:30.390292504 +0000 UTC m=+234.700581357" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.337401 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c7r4b"] Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.337968 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c7r4b" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="registry-server" containerID="cri-o://9aea3635edb0e2911cb0dd5db6248f529915ea256bf8f487e5b2d4473b443d76" gracePeriod=30 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.349653 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4k5qr"] Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.349848 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4k5qr" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="registry-server" containerID="cri-o://bee628bd9bf65205aac2edbdbd52bddd2078bff258b0a3041309904241a8f4e8" gracePeriod=30 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.364301 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xb8f7"] Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.364537 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerName="marketplace-operator" containerID="cri-o://b2fb5227972ad9ae476fa41df4649528697499ac0149502a3d6c020c10179213" gracePeriod=30 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.373680 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2bmls"] Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.373879 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2bmls" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="registry-server" containerID="cri-o://6f67e839370dd2340215d2f090a74b878ba583bec594cf40b3e049f74600b659" gracePeriod=30 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.383926 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lw9w7"] Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.384312 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lw9w7" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="registry-server" containerID="cri-o://a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974" gracePeriod=30 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.389842 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wfjrk"] Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.390495 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.400844 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wfjrk"] Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.498636 4922 generic.go:334] "Generic (PLEG): container finished" podID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerID="6f67e839370dd2340215d2f090a74b878ba583bec594cf40b3e049f74600b659" exitCode=0 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.498719 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2bmls" event={"ID":"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91","Type":"ContainerDied","Data":"6f67e839370dd2340215d2f090a74b878ba583bec594cf40b3e049f74600b659"} Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.501858 4922 generic.go:334] "Generic (PLEG): container finished" podID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerID="9aea3635edb0e2911cb0dd5db6248f529915ea256bf8f487e5b2d4473b443d76" exitCode=0 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.501919 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c7r4b" event={"ID":"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976","Type":"ContainerDied","Data":"9aea3635edb0e2911cb0dd5db6248f529915ea256bf8f487e5b2d4473b443d76"} Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.506158 4922 generic.go:334] "Generic (PLEG): container finished" podID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerID="bee628bd9bf65205aac2edbdbd52bddd2078bff258b0a3041309904241a8f4e8" exitCode=0 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.506247 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k5qr" event={"ID":"ce910279-f40a-4b94-9be2-718aa508e1d1","Type":"ContainerDied","Data":"bee628bd9bf65205aac2edbdbd52bddd2078bff258b0a3041309904241a8f4e8"} Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.508615 4922 generic.go:334] "Generic (PLEG): container finished" podID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerID="b2fb5227972ad9ae476fa41df4649528697499ac0149502a3d6c020c10179213" exitCode=0 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.508664 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" event={"ID":"d51b12db-7451-4cf2-bf6e-a156e2654342","Type":"ContainerDied","Data":"b2fb5227972ad9ae476fa41df4649528697499ac0149502a3d6c020c10179213"} Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.571194 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/95f36381-a854-43df-bbe6-7afddea2b2c7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.571299 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/95f36381-a854-43df-bbe6-7afddea2b2c7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 
22:30:50.571329 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx9j9\" (UniqueName: \"kubernetes.io/projected/95f36381-a854-43df-bbe6-7afddea2b2c7-kube-api-access-xx9j9\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.672156 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx9j9\" (UniqueName: \"kubernetes.io/projected/95f36381-a854-43df-bbe6-7afddea2b2c7-kube-api-access-xx9j9\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.672414 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/95f36381-a854-43df-bbe6-7afddea2b2c7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.672481 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/95f36381-a854-43df-bbe6-7afddea2b2c7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.673774 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/95f36381-a854-43df-bbe6-7afddea2b2c7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.678956 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/95f36381-a854-43df-bbe6-7afddea2b2c7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.698630 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx9j9\" (UniqueName: \"kubernetes.io/projected/95f36381-a854-43df-bbe6-7afddea2b2c7-kube-api-access-xx9j9\") pod \"marketplace-operator-79b997595-wfjrk\" (UID: \"95f36381-a854-43df-bbe6-7afddea2b2c7\") " pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.708172 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.776433 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.780659 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.793189 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.849838 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.855802 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.956456 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wfjrk"] Sep 29 22:30:50 crc kubenswrapper[4922]: W0929 22:30:50.965799 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95f36381_a854_43df_bbe6_7afddea2b2c7.slice/crio-8864339e281ee55917efb11418fa76796fa5aaa0359456b104d23d51cc3a2b58 WatchSource:0}: Error finding container 8864339e281ee55917efb11418fa76796fa5aaa0359456b104d23d51cc3a2b58: Status 404 returned error can't find the container with id 8864339e281ee55917efb11418fa76796fa5aaa0359456b104d23d51cc3a2b58 Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974289 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-utilities\") pod \"ce910279-f40a-4b94-9be2-718aa508e1d1\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974347 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-trusted-ca\") pod \"d51b12db-7451-4cf2-bf6e-a156e2654342\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974375 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-utilities\") pod \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974409 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-operator-metrics\") pod \"d51b12db-7451-4cf2-bf6e-a156e2654342\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974433 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-utilities\") pod \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974461 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74mcv\" (UniqueName: \"kubernetes.io/projected/ce910279-f40a-4b94-9be2-718aa508e1d1-kube-api-access-74mcv\") pod \"ce910279-f40a-4b94-9be2-718aa508e1d1\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " 
Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974491 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-catalog-content\") pod \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974524 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-catalog-content\") pod \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974550 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lds4h\" (UniqueName: \"kubernetes.io/projected/d51b12db-7451-4cf2-bf6e-a156e2654342-kube-api-access-lds4h\") pod \"d51b12db-7451-4cf2-bf6e-a156e2654342\" (UID: \"d51b12db-7451-4cf2-bf6e-a156e2654342\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974575 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-catalog-content\") pod \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974594 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62qhl\" (UniqueName: \"kubernetes.io/projected/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-kube-api-access-62qhl\") pod \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\" (UID: \"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974615 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz2lr\" (UniqueName: \"kubernetes.io/projected/e5e8f04f-0814-4db4-8614-ce1180ed65ca-kube-api-access-wz2lr\") pod \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974635 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-catalog-content\") pod \"ce910279-f40a-4b94-9be2-718aa508e1d1\" (UID: \"ce910279-f40a-4b94-9be2-718aa508e1d1\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974651 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-utilities\") pod \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\" (UID: \"e5e8f04f-0814-4db4-8614-ce1180ed65ca\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.974671 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pb2wl\" (UniqueName: \"kubernetes.io/projected/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-kube-api-access-pb2wl\") pod \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\" (UID: \"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976\") " Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.975379 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-utilities" (OuterVolumeSpecName: "utilities") pod "a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" (UID: 
"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.975722 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-utilities" (OuterVolumeSpecName: "utilities") pod "ce910279-f40a-4b94-9be2-718aa508e1d1" (UID: "ce910279-f40a-4b94-9be2-718aa508e1d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.976571 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-utilities" (OuterVolumeSpecName: "utilities") pod "e5e8f04f-0814-4db4-8614-ce1180ed65ca" (UID: "e5e8f04f-0814-4db4-8614-ce1180ed65ca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.978815 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d51b12db-7451-4cf2-bf6e-a156e2654342-kube-api-access-lds4h" (OuterVolumeSpecName: "kube-api-access-lds4h") pod "d51b12db-7451-4cf2-bf6e-a156e2654342" (UID: "d51b12db-7451-4cf2-bf6e-a156e2654342"). InnerVolumeSpecName "kube-api-access-lds4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.978832 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce910279-f40a-4b94-9be2-718aa508e1d1-kube-api-access-74mcv" (OuterVolumeSpecName: "kube-api-access-74mcv") pod "ce910279-f40a-4b94-9be2-718aa508e1d1" (UID: "ce910279-f40a-4b94-9be2-718aa508e1d1"). InnerVolumeSpecName "kube-api-access-74mcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.978901 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "d51b12db-7451-4cf2-bf6e-a156e2654342" (UID: "d51b12db-7451-4cf2-bf6e-a156e2654342"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.979163 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5e8f04f-0814-4db4-8614-ce1180ed65ca-kube-api-access-wz2lr" (OuterVolumeSpecName: "kube-api-access-wz2lr") pod "e5e8f04f-0814-4db4-8614-ce1180ed65ca" (UID: "e5e8f04f-0814-4db4-8614-ce1180ed65ca"). InnerVolumeSpecName "kube-api-access-wz2lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.979672 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-kube-api-access-62qhl" (OuterVolumeSpecName: "kube-api-access-62qhl") pod "e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" (UID: "e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91"). InnerVolumeSpecName "kube-api-access-62qhl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.982362 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-kube-api-access-pb2wl" (OuterVolumeSpecName: "kube-api-access-pb2wl") pod "a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" (UID: "a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976"). InnerVolumeSpecName "kube-api-access-pb2wl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.984428 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-utilities" (OuterVolumeSpecName: "utilities") pod "e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" (UID: "e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.986109 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "d51b12db-7451-4cf2-bf6e-a156e2654342" (UID: "d51b12db-7451-4cf2-bf6e-a156e2654342"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:30:50 crc kubenswrapper[4922]: I0929 22:30:50.997842 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" (UID: "e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.036644 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce910279-f40a-4b94-9be2-718aa508e1d1" (UID: "ce910279-f40a-4b94-9be2-718aa508e1d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.036819 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" (UID: "a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.075517 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.075675 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74mcv\" (UniqueName: \"kubernetes.io/projected/ce910279-f40a-4b94-9be2-718aa508e1d1-kube-api-access-74mcv\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.075745 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.075801 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.075854 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lds4h\" (UniqueName: \"kubernetes.io/projected/d51b12db-7451-4cf2-bf6e-a156e2654342-kube-api-access-lds4h\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.075907 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62qhl\" (UniqueName: \"kubernetes.io/projected/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91-kube-api-access-62qhl\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.075966 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz2lr\" (UniqueName: \"kubernetes.io/projected/e5e8f04f-0814-4db4-8614-ce1180ed65ca-kube-api-access-wz2lr\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.076025 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.076078 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.076413 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pb2wl\" (UniqueName: \"kubernetes.io/projected/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-kube-api-access-pb2wl\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.076487 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce910279-f40a-4b94-9be2-718aa508e1d1-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.076542 4922 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.076605 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.076666 4922 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d51b12db-7451-4cf2-bf6e-a156e2654342-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.093663 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5e8f04f-0814-4db4-8614-ce1180ed65ca" (UID: "e5e8f04f-0814-4db4-8614-ce1180ed65ca"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.178516 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5e8f04f-0814-4db4-8614-ce1180ed65ca-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.516939 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c7r4b" event={"ID":"a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976","Type":"ContainerDied","Data":"90f83069412c5495841cc0719e3d841e8e9151fa65aef0c2ce28b2d9c8a236a1"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.517711 4922 scope.go:117] "RemoveContainer" containerID="9aea3635edb0e2911cb0dd5db6248f529915ea256bf8f487e5b2d4473b443d76" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.516959 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c7r4b" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.519953 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4k5qr" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.519954 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4k5qr" event={"ID":"ce910279-f40a-4b94-9be2-718aa508e1d1","Type":"ContainerDied","Data":"2f1f182fd273c0d8b33592897b3683ea1023b3f4eadbcdebc60b8ce6729a8a20"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.527595 4922 generic.go:334] "Generic (PLEG): container finished" podID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerID="a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974" exitCode=0 Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.527686 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lw9w7" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.527878 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lw9w7" event={"ID":"e5e8f04f-0814-4db4-8614-ce1180ed65ca","Type":"ContainerDied","Data":"a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.527941 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lw9w7" event={"ID":"e5e8f04f-0814-4db4-8614-ce1180ed65ca","Type":"ContainerDied","Data":"05a770eeb9f982e7856411703eadd8d9cccd33e68b848a8b5171ae59e520d907"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.529293 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" event={"ID":"95f36381-a854-43df-bbe6-7afddea2b2c7","Type":"ContainerStarted","Data":"e520fb30ae14ae2db44391922b82a1fc22af84164481864e49a61aadc3b1ce13"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.529320 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" event={"ID":"95f36381-a854-43df-bbe6-7afddea2b2c7","Type":"ContainerStarted","Data":"8864339e281ee55917efb11418fa76796fa5aaa0359456b104d23d51cc3a2b58"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.530431 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.532104 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" event={"ID":"d51b12db-7451-4cf2-bf6e-a156e2654342","Type":"ContainerDied","Data":"bebbccf830b64ce19db7cc9ff52a7312d22947bef4fc70c50abeb49028546c86"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.532203 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-xb8f7" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.533945 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.536604 4922 scope.go:117] "RemoveContainer" containerID="02e4ff22b0adc621debf63650401e013cabbd60382303bb5be91bcc6ddea11bf" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.539359 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2bmls" event={"ID":"e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91","Type":"ContainerDied","Data":"1e35ea767e46c9bcdbde6e407298cdba753bf73c64a85b8b70de6415165ccbe3"} Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.539564 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2bmls" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.555747 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-wfjrk" podStartSLOduration=1.555702956 podStartE2EDuration="1.555702956s" podCreationTimestamp="2025-09-29 22:30:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:30:51.555136212 +0000 UTC m=+255.865425025" watchObservedRunningTime="2025-09-29 22:30:51.555702956 +0000 UTC m=+255.865991779" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.573524 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c7r4b"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.575871 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c7r4b"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.590455 4922 scope.go:117] "RemoveContainer" containerID="8d5a31b7981b1f6e1dc394740ce4b05a439415fd49a744a60ec831ed4e39f4e8" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.579595 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4k5qr"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.594715 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4k5qr"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.606400 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xb8f7"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.610354 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-xb8f7"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.611322 4922 scope.go:117] "RemoveContainer" containerID="bee628bd9bf65205aac2edbdbd52bddd2078bff258b0a3041309904241a8f4e8" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.623323 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2bmls"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.628131 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2bmls"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.630912 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lw9w7"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.632091 4922 scope.go:117] "RemoveContainer" containerID="83d623ea7944d4d3c29e399844237e6483ad411fd243c8b34ce5f7f24400ed91" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.633022 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lw9w7"] Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.645847 4922 scope.go:117] "RemoveContainer" containerID="f8babb7cb6717cef45758cf9b40fc7b78b82be4ae7c77197ca60b9ff49ac4668" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.661436 4922 scope.go:117] "RemoveContainer" containerID="a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.675851 4922 scope.go:117] "RemoveContainer" containerID="fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.698648 4922 
scope.go:117] "RemoveContainer" containerID="f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.716127 4922 scope.go:117] "RemoveContainer" containerID="a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974" Sep 29 22:30:51 crc kubenswrapper[4922]: E0929 22:30:51.716708 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974\": container with ID starting with a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974 not found: ID does not exist" containerID="a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.716740 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974"} err="failed to get container status \"a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974\": rpc error: code = NotFound desc = could not find container \"a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974\": container with ID starting with a193b8636d02370428004a47f2cfd72febbc420a73387d2bbcb60f908d6a2974 not found: ID does not exist" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.716763 4922 scope.go:117] "RemoveContainer" containerID="fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3" Sep 29 22:30:51 crc kubenswrapper[4922]: E0929 22:30:51.717164 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3\": container with ID starting with fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3 not found: ID does not exist" containerID="fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.717209 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3"} err="failed to get container status \"fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3\": rpc error: code = NotFound desc = could not find container \"fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3\": container with ID starting with fd8636ac174fe35cc9df96dcc19fa7acae8cba581b636888ca4e2e9857413cc3 not found: ID does not exist" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.717266 4922 scope.go:117] "RemoveContainer" containerID="f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530" Sep 29 22:30:51 crc kubenswrapper[4922]: E0929 22:30:51.717725 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530\": container with ID starting with f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530 not found: ID does not exist" containerID="f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.717748 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530"} err="failed to get container status 
\"f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530\": rpc error: code = NotFound desc = could not find container \"f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530\": container with ID starting with f7adb03120528d2dd898bda1f7884384acd8c53d9a00ed0e661ae1d57867e530 not found: ID does not exist" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.717777 4922 scope.go:117] "RemoveContainer" containerID="b2fb5227972ad9ae476fa41df4649528697499ac0149502a3d6c020c10179213" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.730556 4922 scope.go:117] "RemoveContainer" containerID="6f67e839370dd2340215d2f090a74b878ba583bec594cf40b3e049f74600b659" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.745038 4922 scope.go:117] "RemoveContainer" containerID="ae26d03ef3bb56eccaa8d581377cf9ad4407d96f0c7eb259151dbf92c8446f7e" Sep 29 22:30:51 crc kubenswrapper[4922]: I0929 22:30:51.757443 4922 scope.go:117] "RemoveContainer" containerID="1cbbfbaae6c0b9072abd77f8de7fe3c066d9ed03882fd1c2fdfbcdddf850de23" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.432423 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" path="/var/lib/kubelet/pods/a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976/volumes" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.434140 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" path="/var/lib/kubelet/pods/ce910279-f40a-4b94-9be2-718aa508e1d1/volumes" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.435654 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" path="/var/lib/kubelet/pods/d51b12db-7451-4cf2-bf6e-a156e2654342/volumes" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.437355 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" path="/var/lib/kubelet/pods/e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91/volumes" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.438576 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" path="/var/lib/kubelet/pods/e5e8f04f-0814-4db4-8614-ce1180ed65ca/volumes" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.552603 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h9fcf"] Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.552967 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerName="marketplace-operator" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.552985 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerName="marketplace-operator" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553000 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553008 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553042 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553050 4922 
state_mem.go:107] "Deleted CPUSet assignment" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553060 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553066 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553076 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553083 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553093 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="extract-utilities" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553117 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="extract-utilities" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553126 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="extract-utilities" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553132 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="extract-utilities" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553141 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553148 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553159 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553166 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="extract-content" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553179 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553186 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553198 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553205 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553215 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="extract-utilities" Sep 29 22:30:52 crc 
kubenswrapper[4922]: I0929 22:30:52.553223 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="extract-utilities" Sep 29 22:30:52 crc kubenswrapper[4922]: E0929 22:30:52.553234 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="extract-utilities" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553241 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="extract-utilities" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553346 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce910279-f40a-4b94-9be2-718aa508e1d1" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553361 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e56c3a7a-4803-4ffa-94a8-8b0cbc75fb91" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553371 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1f97f9b-c8dc-4a2f-a6c6-7d455b27f976" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553405 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5e8f04f-0814-4db4-8614-ce1180ed65ca" containerName="registry-server" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.553417 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d51b12db-7451-4cf2-bf6e-a156e2654342" containerName="marketplace-operator" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.554255 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.557364 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.563926 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h9fcf"] Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.708179 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-catalog-content\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.708323 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-utilities\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.708359 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9h5l\" (UniqueName: \"kubernetes.io/projected/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-kube-api-access-h9h5l\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.752286 4922 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-vl2vq"] Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.753285 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.759269 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.774458 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vl2vq"] Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.809383 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-utilities\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.809450 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-catalog-content\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.809495 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9h5l\" (UniqueName: \"kubernetes.io/projected/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-kube-api-access-h9h5l\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.809512 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7zw8\" (UniqueName: \"kubernetes.io/projected/c532762f-a530-445f-bb45-08438b834442-kube-api-access-l7zw8\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.809534 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-catalog-content\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.809769 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-utilities\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.810337 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-utilities\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.810523 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-catalog-content\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.826672 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9h5l\" (UniqueName: \"kubernetes.io/projected/868a3ad6-a6e9-4e1c-9aad-638cc0337f1a-kube-api-access-h9h5l\") pod \"redhat-marketplace-h9fcf\" (UID: \"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a\") " pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.880407 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.910168 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-catalog-content\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.910220 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7zw8\" (UniqueName: \"kubernetes.io/projected/c532762f-a530-445f-bb45-08438b834442-kube-api-access-l7zw8\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.910276 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-utilities\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.910693 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-catalog-content\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.910726 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-utilities\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:52 crc kubenswrapper[4922]: I0929 22:30:52.927523 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7zw8\" (UniqueName: \"kubernetes.io/projected/c532762f-a530-445f-bb45-08438b834442-kube-api-access-l7zw8\") pod \"redhat-operators-vl2vq\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.084608 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.102594 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h9fcf"] Sep 29 22:30:53 crc kubenswrapper[4922]: W0929 22:30:53.111987 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod868a3ad6_a6e9_4e1c_9aad_638cc0337f1a.slice/crio-4ccc638477ba4e2cf5cbc7af22968e9faa0226e58d6c2a1e4b7466f95defeb88 WatchSource:0}: Error finding container 4ccc638477ba4e2cf5cbc7af22968e9faa0226e58d6c2a1e4b7466f95defeb88: Status 404 returned error can't find the container with id 4ccc638477ba4e2cf5cbc7af22968e9faa0226e58d6c2a1e4b7466f95defeb88 Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.289858 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vl2vq"] Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.570934 4922 generic.go:334] "Generic (PLEG): container finished" podID="c532762f-a530-445f-bb45-08438b834442" containerID="0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6" exitCode=0 Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.571047 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl2vq" event={"ID":"c532762f-a530-445f-bb45-08438b834442","Type":"ContainerDied","Data":"0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6"} Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.571088 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl2vq" event={"ID":"c532762f-a530-445f-bb45-08438b834442","Type":"ContainerStarted","Data":"2dbc9905c8a20e3d3edf7dcb7e458f78253d3aaf99f8e52182bef06d5f4eee20"} Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.573113 4922 generic.go:334] "Generic (PLEG): container finished" podID="868a3ad6-a6e9-4e1c-9aad-638cc0337f1a" containerID="2a909cbffc0ed01dca313302f3950fba67b9f56f0c9227f7f31e150bce5cf968" exitCode=0 Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.573246 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9fcf" event={"ID":"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a","Type":"ContainerDied","Data":"2a909cbffc0ed01dca313302f3950fba67b9f56f0c9227f7f31e150bce5cf968"} Sep 29 22:30:53 crc kubenswrapper[4922]: I0929 22:30:53.573308 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9fcf" event={"ID":"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a","Type":"ContainerStarted","Data":"4ccc638477ba4e2cf5cbc7af22968e9faa0226e58d6c2a1e4b7466f95defeb88"} Sep 29 22:30:54 crc kubenswrapper[4922]: I0929 22:30:54.591984 4922 generic.go:334] "Generic (PLEG): container finished" podID="868a3ad6-a6e9-4e1c-9aad-638cc0337f1a" containerID="3ac965006ce42c3d2a5f39368e9933fed0d1a50352605e885f81cd6a494e3dcb" exitCode=0 Sep 29 22:30:54 crc kubenswrapper[4922]: I0929 22:30:54.592050 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9fcf" event={"ID":"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a","Type":"ContainerDied","Data":"3ac965006ce42c3d2a5f39368e9933fed0d1a50352605e885f81cd6a494e3dcb"} Sep 29 22:30:54 crc kubenswrapper[4922]: I0929 22:30:54.595684 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl2vq" 
event={"ID":"c532762f-a530-445f-bb45-08438b834442","Type":"ContainerStarted","Data":"48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232"} Sep 29 22:30:54 crc kubenswrapper[4922]: I0929 22:30:54.954855 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xk4lz"] Sep 29 22:30:54 crc kubenswrapper[4922]: I0929 22:30:54.956439 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:54 crc kubenswrapper[4922]: I0929 22:30:54.960497 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 22:30:54 crc kubenswrapper[4922]: I0929 22:30:54.967579 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xk4lz"] Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.136582 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-catalog-content\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.136682 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-utilities\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.136765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8qqb\" (UniqueName: \"kubernetes.io/projected/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-kube-api-access-s8qqb\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.171163 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-49hdj"] Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.172239 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.174638 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.178467 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-49hdj"] Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.237689 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-catalog-content\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.237808 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-utilities\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.237924 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8qqb\" (UniqueName: \"kubernetes.io/projected/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-kube-api-access-s8qqb\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.238254 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-utilities\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.238378 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-catalog-content\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.262305 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8qqb\" (UniqueName: \"kubernetes.io/projected/5dfb81d8-4b0b-4414-8a11-f0d8b72e471f-kube-api-access-s8qqb\") pod \"certified-operators-xk4lz\" (UID: \"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f\") " pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.287870 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.339724 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-utilities\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.339854 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbwnq\" (UniqueName: \"kubernetes.io/projected/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-kube-api-access-zbwnq\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.339945 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-catalog-content\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.441006 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-utilities\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.441290 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbwnq\" (UniqueName: \"kubernetes.io/projected/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-kube-api-access-zbwnq\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.441331 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-catalog-content\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.441770 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-catalog-content\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.442613 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-utilities\") pod \"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.457339 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbwnq\" (UniqueName: \"kubernetes.io/projected/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-kube-api-access-zbwnq\") pod 
\"community-operators-49hdj\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.469884 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xk4lz"] Sep 29 22:30:55 crc kubenswrapper[4922]: W0929 22:30:55.476673 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5dfb81d8_4b0b_4414_8a11_f0d8b72e471f.slice/crio-1affc04500666d8858b0e69cc5658cdbbf08b9adf678da58bf6bccede063d3c8 WatchSource:0}: Error finding container 1affc04500666d8858b0e69cc5658cdbbf08b9adf678da58bf6bccede063d3c8: Status 404 returned error can't find the container with id 1affc04500666d8858b0e69cc5658cdbbf08b9adf678da58bf6bccede063d3c8 Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.489959 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.602517 4922 generic.go:334] "Generic (PLEG): container finished" podID="c532762f-a530-445f-bb45-08438b834442" containerID="48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232" exitCode=0 Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.602657 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl2vq" event={"ID":"c532762f-a530-445f-bb45-08438b834442","Type":"ContainerDied","Data":"48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232"} Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.604690 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xk4lz" event={"ID":"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f","Type":"ContainerStarted","Data":"1383ce25e4b101b3bd263b332624512cb43c563e6dd2d98d42368b5f6e6b784b"} Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.604714 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xk4lz" event={"ID":"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f","Type":"ContainerStarted","Data":"1affc04500666d8858b0e69cc5658cdbbf08b9adf678da58bf6bccede063d3c8"} Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.607630 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h9fcf" event={"ID":"868a3ad6-a6e9-4e1c-9aad-638cc0337f1a","Type":"ContainerStarted","Data":"73dede4aa7c388933a5a3fcc8350dee0058af7cd1d106157f64c8e2c9eccf722"} Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.647060 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h9fcf" podStartSLOduration=2.227712551 podStartE2EDuration="3.647044928s" podCreationTimestamp="2025-09-29 22:30:52 +0000 UTC" firstStartedPulling="2025-09-29 22:30:53.575426324 +0000 UTC m=+257.885715137" lastFinishedPulling="2025-09-29 22:30:54.994758671 +0000 UTC m=+259.305047514" observedRunningTime="2025-09-29 22:30:55.64388139 +0000 UTC m=+259.954170203" watchObservedRunningTime="2025-09-29 22:30:55.647044928 +0000 UTC m=+259.957333741" Sep 29 22:30:55 crc kubenswrapper[4922]: I0929 22:30:55.697906 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-49hdj"] Sep 29 22:30:56 crc kubenswrapper[4922]: I0929 22:30:56.621725 4922 generic.go:334] "Generic (PLEG): container finished" podID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" 
containerID="0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e" exitCode=0 Sep 29 22:30:56 crc kubenswrapper[4922]: I0929 22:30:56.621788 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49hdj" event={"ID":"ac2c8151-6c2e-4341-a5fd-0beb09dcba81","Type":"ContainerDied","Data":"0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e"} Sep 29 22:30:56 crc kubenswrapper[4922]: I0929 22:30:56.622145 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49hdj" event={"ID":"ac2c8151-6c2e-4341-a5fd-0beb09dcba81","Type":"ContainerStarted","Data":"77977630dceb27ba17bbf1a439a29111ead811255f60fa3cec45798d58514d35"} Sep 29 22:30:56 crc kubenswrapper[4922]: I0929 22:30:56.629900 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl2vq" event={"ID":"c532762f-a530-445f-bb45-08438b834442","Type":"ContainerStarted","Data":"696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d"} Sep 29 22:30:56 crc kubenswrapper[4922]: I0929 22:30:56.635377 4922 generic.go:334] "Generic (PLEG): container finished" podID="5dfb81d8-4b0b-4414-8a11-f0d8b72e471f" containerID="1383ce25e4b101b3bd263b332624512cb43c563e6dd2d98d42368b5f6e6b784b" exitCode=0 Sep 29 22:30:56 crc kubenswrapper[4922]: I0929 22:30:56.636178 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xk4lz" event={"ID":"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f","Type":"ContainerDied","Data":"1383ce25e4b101b3bd263b332624512cb43c563e6dd2d98d42368b5f6e6b784b"} Sep 29 22:30:57 crc kubenswrapper[4922]: I0929 22:30:57.648914 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49hdj" event={"ID":"ac2c8151-6c2e-4341-a5fd-0beb09dcba81","Type":"ContainerStarted","Data":"bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933"} Sep 29 22:30:57 crc kubenswrapper[4922]: I0929 22:30:57.662791 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xk4lz" event={"ID":"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f","Type":"ContainerStarted","Data":"c716fd74072486cbe73450861a83262a7f25430cf917059b9864e6a23d148768"} Sep 29 22:30:57 crc kubenswrapper[4922]: I0929 22:30:57.682344 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vl2vq" podStartSLOduration=3.143377372 podStartE2EDuration="5.682308831s" podCreationTimestamp="2025-09-29 22:30:52 +0000 UTC" firstStartedPulling="2025-09-29 22:30:53.573885956 +0000 UTC m=+257.884174769" lastFinishedPulling="2025-09-29 22:30:56.112817415 +0000 UTC m=+260.423106228" observedRunningTime="2025-09-29 22:30:56.693073918 +0000 UTC m=+261.003362741" watchObservedRunningTime="2025-09-29 22:30:57.682308831 +0000 UTC m=+261.992597694" Sep 29 22:30:58 crc kubenswrapper[4922]: I0929 22:30:58.673139 4922 generic.go:334] "Generic (PLEG): container finished" podID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerID="bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933" exitCode=0 Sep 29 22:30:58 crc kubenswrapper[4922]: I0929 22:30:58.673264 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49hdj" event={"ID":"ac2c8151-6c2e-4341-a5fd-0beb09dcba81","Type":"ContainerDied","Data":"bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933"} Sep 29 22:30:58 crc kubenswrapper[4922]: I0929 22:30:58.676580 4922 
generic.go:334] "Generic (PLEG): container finished" podID="5dfb81d8-4b0b-4414-8a11-f0d8b72e471f" containerID="c716fd74072486cbe73450861a83262a7f25430cf917059b9864e6a23d148768" exitCode=0 Sep 29 22:30:58 crc kubenswrapper[4922]: I0929 22:30:58.676650 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xk4lz" event={"ID":"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f","Type":"ContainerDied","Data":"c716fd74072486cbe73450861a83262a7f25430cf917059b9864e6a23d148768"} Sep 29 22:30:59 crc kubenswrapper[4922]: I0929 22:30:59.685297 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xk4lz" event={"ID":"5dfb81d8-4b0b-4414-8a11-f0d8b72e471f","Type":"ContainerStarted","Data":"ec9c3ec4802309f22c7718f5b0519f560d8b08e7bf13e8092b40478ecec3f477"} Sep 29 22:30:59 crc kubenswrapper[4922]: I0929 22:30:59.714351 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xk4lz" podStartSLOduration=3.090638755 podStartE2EDuration="5.714328264s" podCreationTimestamp="2025-09-29 22:30:54 +0000 UTC" firstStartedPulling="2025-09-29 22:30:56.646412412 +0000 UTC m=+260.956701225" lastFinishedPulling="2025-09-29 22:30:59.270101921 +0000 UTC m=+263.580390734" observedRunningTime="2025-09-29 22:30:59.713660717 +0000 UTC m=+264.023949540" watchObservedRunningTime="2025-09-29 22:30:59.714328264 +0000 UTC m=+264.024617077" Sep 29 22:31:00 crc kubenswrapper[4922]: I0929 22:31:00.693279 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49hdj" event={"ID":"ac2c8151-6c2e-4341-a5fd-0beb09dcba81","Type":"ContainerStarted","Data":"73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d"} Sep 29 22:31:00 crc kubenswrapper[4922]: I0929 22:31:00.712708 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-49hdj" podStartSLOduration=2.36250211 podStartE2EDuration="5.712687283s" podCreationTimestamp="2025-09-29 22:30:55 +0000 UTC" firstStartedPulling="2025-09-29 22:30:56.625579386 +0000 UTC m=+260.935868209" lastFinishedPulling="2025-09-29 22:30:59.975764529 +0000 UTC m=+264.286053382" observedRunningTime="2025-09-29 22:31:00.712114899 +0000 UTC m=+265.022403712" watchObservedRunningTime="2025-09-29 22:31:00.712687283 +0000 UTC m=+265.022976096" Sep 29 22:31:02 crc kubenswrapper[4922]: I0929 22:31:02.881685 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:31:02 crc kubenswrapper[4922]: I0929 22:31:02.881766 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:31:02 crc kubenswrapper[4922]: I0929 22:31:02.944532 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:31:03 crc kubenswrapper[4922]: I0929 22:31:03.085774 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:31:03 crc kubenswrapper[4922]: I0929 22:31:03.085865 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:31:03 crc kubenswrapper[4922]: I0929 22:31:03.152881 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 
22:31:03 crc kubenswrapper[4922]: I0929 22:31:03.766567 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 22:31:03 crc kubenswrapper[4922]: I0929 22:31:03.770670 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h9fcf" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.288259 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.288689 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.331325 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.490531 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.490619 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.532131 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.771868 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-49hdj" Sep 29 22:31:05 crc kubenswrapper[4922]: I0929 22:31:05.776763 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xk4lz" Sep 29 22:32:28 crc kubenswrapper[4922]: I0929 22:32:28.913236 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:32:28 crc kubenswrapper[4922]: I0929 22:32:28.913988 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:32:58 crc kubenswrapper[4922]: I0929 22:32:58.912683 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:32:58 crc kubenswrapper[4922]: I0929 22:32:58.913515 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:33:28 crc kubenswrapper[4922]: I0929 22:33:28.913087 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:33:28 crc kubenswrapper[4922]: I0929 22:33:28.914551 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:33:28 crc kubenswrapper[4922]: I0929 22:33:28.914670 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:33:28 crc kubenswrapper[4922]: I0929 22:33:28.915454 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"accabd2cf4dc0ea91bb223aa4ccdc7ab29b4040f94afa9cb51a973916e1e42e0"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:33:28 crc kubenswrapper[4922]: I0929 22:33:28.915652 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://accabd2cf4dc0ea91bb223aa4ccdc7ab29b4040f94afa9cb51a973916e1e42e0" gracePeriod=600 Sep 29 22:33:29 crc kubenswrapper[4922]: I0929 22:33:29.665312 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="accabd2cf4dc0ea91bb223aa4ccdc7ab29b4040f94afa9cb51a973916e1e42e0" exitCode=0 Sep 29 22:33:29 crc kubenswrapper[4922]: I0929 22:33:29.665377 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"accabd2cf4dc0ea91bb223aa4ccdc7ab29b4040f94afa9cb51a973916e1e42e0"} Sep 29 22:33:29 crc kubenswrapper[4922]: I0929 22:33:29.665788 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"be2fe9125c1ce8caf5b45073baadd0f21588a94e8d9279d703866cc234e4eaaf"} Sep 29 22:33:29 crc kubenswrapper[4922]: I0929 22:33:29.665822 4922 scope.go:117] "RemoveContainer" containerID="50dcf8b01222d137cd4139023c56b8647b447f984aabc676be00accb5ae7ae59" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.381685 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bk758"] Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.382887 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.450918 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bk758"] Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.541623 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.541681 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.541707 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.541737 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-trusted-ca\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.541765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-bound-sa-token\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.541952 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-registry-tls\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.542009 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-registry-certificates\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.542028 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w75lz\" (UniqueName: 
\"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-kube-api-access-w75lz\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.561986 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.642888 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.642949 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.642993 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-trusted-ca\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.643018 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-bound-sa-token\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.643037 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-registry-tls\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.643060 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-registry-certificates\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.643075 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w75lz\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-kube-api-access-w75lz\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.644338 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-ca-trust-extracted\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.644504 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-trusted-ca\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.644637 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-registry-certificates\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.649018 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-installation-pull-secrets\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.657362 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-registry-tls\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.663548 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-bound-sa-token\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.666982 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w75lz\" (UniqueName: \"kubernetes.io/projected/1e3a579a-6cc8-43e3-9767-7630c4fa2cf9-kube-api-access-w75lz\") pod \"image-registry-66df7c8f76-bk758\" (UID: \"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9\") " pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:35 crc kubenswrapper[4922]: I0929 22:33:35.697906 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:36 crc kubenswrapper[4922]: I0929 22:33:36.103150 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-bk758"] Sep 29 22:33:36 crc kubenswrapper[4922]: I0929 22:33:36.717232 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bk758" event={"ID":"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9","Type":"ContainerStarted","Data":"6379c7a2bb64e400645ebd6ed4d001ce4eddb5267dd569622c7d8c417452bf6e"} Sep 29 22:33:36 crc kubenswrapper[4922]: I0929 22:33:36.717278 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-bk758" event={"ID":"1e3a579a-6cc8-43e3-9767-7630c4fa2cf9","Type":"ContainerStarted","Data":"f4019344a28519bfbf6803d9f0d4b66173acfe5b8f10cc7a1ecc273773f0cb1c"} Sep 29 22:33:36 crc kubenswrapper[4922]: I0929 22:33:36.717431 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:36 crc kubenswrapper[4922]: I0929 22:33:36.741835 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-bk758" podStartSLOduration=1.741819636 podStartE2EDuration="1.741819636s" podCreationTimestamp="2025-09-29 22:33:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:33:36.739458598 +0000 UTC m=+421.049747421" watchObservedRunningTime="2025-09-29 22:33:36.741819636 +0000 UTC m=+421.052108459" Sep 29 22:33:55 crc kubenswrapper[4922]: I0929 22:33:55.706037 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-bk758" Sep 29 22:33:55 crc kubenswrapper[4922]: I0929 22:33:55.784145 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rwhm5"] Sep 29 22:34:20 crc kubenswrapper[4922]: I0929 22:34:20.843758 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" podUID="104a5cfb-7f2a-48d1-be00-10f698d0b552" containerName="registry" containerID="cri-o://d17bf8df2a847e83ff1648983d2bc378cb485b27f7b546e18a90df372fd289b3" gracePeriod=30 Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.007612 4922 generic.go:334] "Generic (PLEG): container finished" podID="104a5cfb-7f2a-48d1-be00-10f698d0b552" containerID="d17bf8df2a847e83ff1648983d2bc378cb485b27f7b546e18a90df372fd289b3" exitCode=0 Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.007677 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" event={"ID":"104a5cfb-7f2a-48d1-be00-10f698d0b552","Type":"ContainerDied","Data":"d17bf8df2a847e83ff1648983d2bc378cb485b27f7b546e18a90df372fd289b3"} Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.284112 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345014 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/104a5cfb-7f2a-48d1-be00-10f698d0b552-installation-pull-secrets\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345088 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-certificates\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345131 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-trusted-ca\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345321 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345359 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-tls\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345423 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-bound-sa-token\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345497 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/104a5cfb-7f2a-48d1-be00-10f698d0b552-ca-trust-extracted\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.345537 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dp2n5\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-kube-api-access-dp2n5\") pod \"104a5cfb-7f2a-48d1-be00-10f698d0b552\" (UID: \"104a5cfb-7f2a-48d1-be00-10f698d0b552\") " Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.346736 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.346833 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.355558 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/104a5cfb-7f2a-48d1-be00-10f698d0b552-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.358664 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.359067 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-kube-api-access-dp2n5" (OuterVolumeSpecName: "kube-api-access-dp2n5") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "kube-api-access-dp2n5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.359638 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.370384 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.380861 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104a5cfb-7f2a-48d1-be00-10f698d0b552-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "104a5cfb-7f2a-48d1-be00-10f698d0b552" (UID: "104a5cfb-7f2a-48d1-be00-10f698d0b552"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.446705 4922 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.446760 4922 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/104a5cfb-7f2a-48d1-be00-10f698d0b552-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.446821 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dp2n5\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-kube-api-access-dp2n5\") on node \"crc\" DevicePath \"\"" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.446848 4922 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/104a5cfb-7f2a-48d1-be00-10f698d0b552-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.446868 4922 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.446887 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/104a5cfb-7f2a-48d1-be00-10f698d0b552-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:34:21 crc kubenswrapper[4922]: I0929 22:34:21.446905 4922 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/104a5cfb-7f2a-48d1-be00-10f698d0b552-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:34:22 crc kubenswrapper[4922]: I0929 22:34:22.018558 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" event={"ID":"104a5cfb-7f2a-48d1-be00-10f698d0b552","Type":"ContainerDied","Data":"7c10ab9754767a0188430929855ee21f8df3cf81117ba5f36595628489c731f7"} Sep 29 22:34:22 crc kubenswrapper[4922]: I0929 22:34:22.018661 4922 scope.go:117] "RemoveContainer" containerID="d17bf8df2a847e83ff1648983d2bc378cb485b27f7b546e18a90df372fd289b3" Sep 29 22:34:22 crc kubenswrapper[4922]: I0929 22:34:22.019567 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-rwhm5" Sep 29 22:34:22 crc kubenswrapper[4922]: I0929 22:34:22.072358 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rwhm5"] Sep 29 22:34:22 crc kubenswrapper[4922]: I0929 22:34:22.082518 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-rwhm5"] Sep 29 22:34:22 crc kubenswrapper[4922]: I0929 22:34:22.434614 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="104a5cfb-7f2a-48d1-be00-10f698d0b552" path="/var/lib/kubelet/pods/104a5cfb-7f2a-48d1-be00-10f698d0b552/volumes" Sep 29 22:35:36 crc kubenswrapper[4922]: I0929 22:35:36.597356 4922 scope.go:117] "RemoveContainer" containerID="719e6c9968ce99c746377175154453274ce12819e5e16f322642e2f703e7b183" Sep 29 22:35:58 crc kubenswrapper[4922]: I0929 22:35:58.912690 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:35:58 crc kubenswrapper[4922]: I0929 22:35:58.914892 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:36:28 crc kubenswrapper[4922]: I0929 22:36:28.912433 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:36:28 crc kubenswrapper[4922]: I0929 22:36:28.913048 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:36:36 crc kubenswrapper[4922]: I0929 22:36:36.636978 4922 scope.go:117] "RemoveContainer" containerID="7660ea69b3a7f660d84532143f363089d14ec1b3f3c77b0985d34271576e2f8c" Sep 29 22:36:36 crc kubenswrapper[4922]: I0929 22:36:36.666622 4922 scope.go:117] "RemoveContainer" containerID="da692710ffb0df2641f9a9ef63eb637da3ae72fa28481e702ec71429b586c929" Sep 29 22:36:58 crc kubenswrapper[4922]: I0929 22:36:58.913245 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:36:58 crc kubenswrapper[4922]: I0929 22:36:58.914147 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:36:58 crc kubenswrapper[4922]: I0929 22:36:58.914767 4922 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:36:58 crc kubenswrapper[4922]: I0929 22:36:58.916280 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"be2fe9125c1ce8caf5b45073baadd0f21588a94e8d9279d703866cc234e4eaaf"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:36:58 crc kubenswrapper[4922]: I0929 22:36:58.916763 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://be2fe9125c1ce8caf5b45073baadd0f21588a94e8d9279d703866cc234e4eaaf" gracePeriod=600 Sep 29 22:36:59 crc kubenswrapper[4922]: I0929 22:36:59.089507 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="be2fe9125c1ce8caf5b45073baadd0f21588a94e8d9279d703866cc234e4eaaf" exitCode=0 Sep 29 22:36:59 crc kubenswrapper[4922]: I0929 22:36:59.089569 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"be2fe9125c1ce8caf5b45073baadd0f21588a94e8d9279d703866cc234e4eaaf"} Sep 29 22:36:59 crc kubenswrapper[4922]: I0929 22:36:59.089623 4922 scope.go:117] "RemoveContainer" containerID="accabd2cf4dc0ea91bb223aa4ccdc7ab29b4040f94afa9cb51a973916e1e42e0" Sep 29 22:37:00 crc kubenswrapper[4922]: I0929 22:37:00.102861 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"2fff5dec163c43924ec181a6c7d9ee934e027ea79ccf259ff2b5530d85b03707"} Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.473334 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7sqx6"] Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.474593 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" podUID="c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" containerName="controller-manager" containerID="cri-o://7c9e93d5db38ee545214c0080870b9c33a58f1b0719e9e49cb2830160116864e" gracePeriod=30 Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.563044 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct"] Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.563297 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" podUID="c30379d2-4644-450d-bc35-d6a4c857d840" containerName="route-controller-manager" containerID="cri-o://23eb23f4044a103569ddce0d2391f49494ca9a625ee490562e49f9168e8ec0ee" gracePeriod=30 Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.940173 4922 generic.go:334] "Generic (PLEG): container finished" podID="c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" containerID="7c9e93d5db38ee545214c0080870b9c33a58f1b0719e9e49cb2830160116864e" exitCode=0 Sep 29 22:39:03 crc 
kubenswrapper[4922]: I0929 22:39:03.940376 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" event={"ID":"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc","Type":"ContainerDied","Data":"7c9e93d5db38ee545214c0080870b9c33a58f1b0719e9e49cb2830160116864e"} Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.940514 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" event={"ID":"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc","Type":"ContainerDied","Data":"7984240d443793470514431959e6bd6c3b9305fa0af78996ddd3feb6bb57fafb"} Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.940564 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7984240d443793470514431959e6bd6c3b9305fa0af78996ddd3feb6bb57fafb" Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.941938 4922 generic.go:334] "Generic (PLEG): container finished" podID="c30379d2-4644-450d-bc35-d6a4c857d840" containerID="23eb23f4044a103569ddce0d2391f49494ca9a625ee490562e49f9168e8ec0ee" exitCode=0 Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.941959 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" event={"ID":"c30379d2-4644-450d-bc35-d6a4c857d840","Type":"ContainerDied","Data":"23eb23f4044a103569ddce0d2391f49494ca9a625ee490562e49f9168e8ec0ee"} Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.959046 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:39:03 crc kubenswrapper[4922]: I0929 22:39:03.994910 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051466 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-client-ca\") pod \"c30379d2-4644-450d-bc35-d6a4c857d840\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051534 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-client-ca\") pod \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051573 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz4vd\" (UniqueName: \"kubernetes.io/projected/c30379d2-4644-450d-bc35-d6a4c857d840-kube-api-access-pz4vd\") pod \"c30379d2-4644-450d-bc35-d6a4c857d840\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051601 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdbx7\" (UniqueName: \"kubernetes.io/projected/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-kube-api-access-rdbx7\") pod \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051639 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c30379d2-4644-450d-bc35-d6a4c857d840-serving-cert\") pod \"c30379d2-4644-450d-bc35-d6a4c857d840\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051660 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-proxy-ca-bundles\") pod \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051680 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-config\") pod \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051698 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-serving-cert\") pod \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\" (UID: \"c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.051716 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-config\") pod \"c30379d2-4644-450d-bc35-d6a4c857d840\" (UID: \"c30379d2-4644-450d-bc35-d6a4c857d840\") " Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.052134 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-client-ca" (OuterVolumeSpecName: "client-ca") pod "c30379d2-4644-450d-bc35-d6a4c857d840" 
(UID: "c30379d2-4644-450d-bc35-d6a4c857d840"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.052194 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-config" (OuterVolumeSpecName: "config") pod "c30379d2-4644-450d-bc35-d6a4c857d840" (UID: "c30379d2-4644-450d-bc35-d6a4c857d840"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.052551 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" (UID: "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.052921 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-config" (OuterVolumeSpecName: "config") pod "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" (UID: "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.053157 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-client-ca" (OuterVolumeSpecName: "client-ca") pod "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" (UID: "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.057646 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-kube-api-access-rdbx7" (OuterVolumeSpecName: "kube-api-access-rdbx7") pod "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" (UID: "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc"). InnerVolumeSpecName "kube-api-access-rdbx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.057788 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c30379d2-4644-450d-bc35-d6a4c857d840-kube-api-access-pz4vd" (OuterVolumeSpecName: "kube-api-access-pz4vd") pod "c30379d2-4644-450d-bc35-d6a4c857d840" (UID: "c30379d2-4644-450d-bc35-d6a4c857d840"). InnerVolumeSpecName "kube-api-access-pz4vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.057846 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" (UID: "c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.059282 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30379d2-4644-450d-bc35-d6a4c857d840-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c30379d2-4644-450d-bc35-d6a4c857d840" (UID: "c30379d2-4644-450d-bc35-d6a4c857d840"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153029 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdbx7\" (UniqueName: \"kubernetes.io/projected/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-kube-api-access-rdbx7\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153057 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c30379d2-4644-450d-bc35-d6a4c857d840-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153067 4922 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153075 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153085 4922 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153094 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153102 4922 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c30379d2-4644-450d-bc35-d6a4c857d840-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153110 4922 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.153118 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz4vd\" (UniqueName: \"kubernetes.io/projected/c30379d2-4644-450d-bc35-d6a4c857d840-kube-api-access-pz4vd\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.732657 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-666b84d7d5-zm2nb"] Sep 29 22:39:04 crc kubenswrapper[4922]: E0929 22:39:04.733325 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" containerName="controller-manager" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.733348 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" containerName="controller-manager" Sep 29 22:39:04 crc kubenswrapper[4922]: E0929 22:39:04.733372 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30379d2-4644-450d-bc35-d6a4c857d840" containerName="route-controller-manager" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.733386 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30379d2-4644-450d-bc35-d6a4c857d840" containerName="route-controller-manager" Sep 29 22:39:04 crc kubenswrapper[4922]: E0929 22:39:04.733431 4922 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104a5cfb-7f2a-48d1-be00-10f698d0b552" containerName="registry" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.733445 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="104a5cfb-7f2a-48d1-be00-10f698d0b552" containerName="registry" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.733651 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" containerName="controller-manager" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.733675 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30379d2-4644-450d-bc35-d6a4c857d840" containerName="route-controller-manager" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.733695 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="104a5cfb-7f2a-48d1-be00-10f698d0b552" containerName="registry" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.734298 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.737340 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd"] Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.738417 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.749621 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-666b84d7d5-zm2nb"] Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.760877 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-serving-cert\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.761009 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-proxy-ca-bundles\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.761062 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2518a83-7442-4605-9ddb-9f7886450a80-config\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.761264 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-config\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 
22:39:04.761407 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h64z\" (UniqueName: \"kubernetes.io/projected/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-kube-api-access-4h64z\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.761455 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2518a83-7442-4605-9ddb-9f7886450a80-client-ca\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.761585 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2518a83-7442-4605-9ddb-9f7886450a80-serving-cert\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.761658 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-client-ca\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.761735 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq2p2\" (UniqueName: \"kubernetes.io/projected/d2518a83-7442-4605-9ddb-9f7886450a80-kube-api-access-fq2p2\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.763723 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd"] Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863259 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-proxy-ca-bundles\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863478 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2518a83-7442-4605-9ddb-9f7886450a80-config\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863554 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-config\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: 
\"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863598 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h64z\" (UniqueName: \"kubernetes.io/projected/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-kube-api-access-4h64z\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863640 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2518a83-7442-4605-9ddb-9f7886450a80-client-ca\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863701 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2518a83-7442-4605-9ddb-9f7886450a80-serving-cert\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863741 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-client-ca\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863782 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq2p2\" (UniqueName: \"kubernetes.io/projected/d2518a83-7442-4605-9ddb-9f7886450a80-kube-api-access-fq2p2\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.863841 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-serving-cert\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.864318 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-proxy-ca-bundles\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.864960 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d2518a83-7442-4605-9ddb-9f7886450a80-client-ca\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc 
kubenswrapper[4922]: I0929 22:39:04.865335 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-client-ca\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.865675 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2518a83-7442-4605-9ddb-9f7886450a80-config\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.866034 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-config\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.870518 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-serving-cert\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.878128 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2518a83-7442-4605-9ddb-9f7886450a80-serving-cert\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.881885 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq2p2\" (UniqueName: \"kubernetes.io/projected/d2518a83-7442-4605-9ddb-9f7886450a80-kube-api-access-fq2p2\") pod \"route-controller-manager-bdf957bdf-rwjkd\" (UID: \"d2518a83-7442-4605-9ddb-9f7886450a80\") " pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.886487 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h64z\" (UniqueName: \"kubernetes.io/projected/b4dbe58e-8259-43d0-9c57-bdbc8730bec5-kube-api-access-4h64z\") pod \"controller-manager-666b84d7d5-zm2nb\" (UID: \"b4dbe58e-8259-43d0-9c57-bdbc8730bec5\") " pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.957856 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7sqx6" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.958927 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.958956 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct" event={"ID":"c30379d2-4644-450d-bc35-d6a4c857d840","Type":"ContainerDied","Data":"b26589c7bc8306c4f4850f3939889b8f1ed53bf70ee21b808cf5b9bcd51d6096"} Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.959031 4922 scope.go:117] "RemoveContainer" containerID="23eb23f4044a103569ddce0d2391f49494ca9a625ee490562e49f9168e8ec0ee" Sep 29 22:39:04 crc kubenswrapper[4922]: I0929 22:39:04.992287 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct"] Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.002345 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gckct"] Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.007054 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7sqx6"] Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.012804 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7sqx6"] Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.066961 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.099558 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.388644 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd"] Sep 29 22:39:05 crc kubenswrapper[4922]: W0929 22:39:05.398852 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2518a83_7442_4605_9ddb_9f7886450a80.slice/crio-143634ece826c9d3d50ed6ca40e3ddb022be94f2a6de6bd6b5641bb22d01e6a0 WatchSource:0}: Error finding container 143634ece826c9d3d50ed6ca40e3ddb022be94f2a6de6bd6b5641bb22d01e6a0: Status 404 returned error can't find the container with id 143634ece826c9d3d50ed6ca40e3ddb022be94f2a6de6bd6b5641bb22d01e6a0 Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.554334 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-666b84d7d5-zm2nb"] Sep 29 22:39:05 crc kubenswrapper[4922]: W0929 22:39:05.558666 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb4dbe58e_8259_43d0_9c57_bdbc8730bec5.slice/crio-24c88f88561ee530dbdc2c2a869377e6c4bad1198ba25e3ede4e6fa284dc7c24 WatchSource:0}: Error finding container 24c88f88561ee530dbdc2c2a869377e6c4bad1198ba25e3ede4e6fa284dc7c24: Status 404 returned error can't find the container with id 24c88f88561ee530dbdc2c2a869377e6c4bad1198ba25e3ede4e6fa284dc7c24 Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.966378 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" 
event={"ID":"d2518a83-7442-4605-9ddb-9f7886450a80","Type":"ContainerStarted","Data":"9003ff2a650a49a8d21e9c0dffebe786b6e5c3d05176f58e1895bd356b96a217"} Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.966773 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" event={"ID":"d2518a83-7442-4605-9ddb-9f7886450a80","Type":"ContainerStarted","Data":"143634ece826c9d3d50ed6ca40e3ddb022be94f2a6de6bd6b5641bb22d01e6a0"} Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.966796 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.970000 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" event={"ID":"b4dbe58e-8259-43d0-9c57-bdbc8730bec5","Type":"ContainerStarted","Data":"4a0fb63f0c9082d66992413040dd0076d4c0b61c6141ac8865fa74cd50eda196"} Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.970034 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" event={"ID":"b4dbe58e-8259-43d0-9c57-bdbc8730bec5","Type":"ContainerStarted","Data":"24c88f88561ee530dbdc2c2a869377e6c4bad1198ba25e3ede4e6fa284dc7c24"} Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.970256 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.971804 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.976228 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" Sep 29 22:39:05 crc kubenswrapper[4922]: I0929 22:39:05.987022 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bdf957bdf-rwjkd" podStartSLOduration=2.986995849 podStartE2EDuration="2.986995849s" podCreationTimestamp="2025-09-29 22:39:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:39:05.985886272 +0000 UTC m=+750.296175085" watchObservedRunningTime="2025-09-29 22:39:05.986995849 +0000 UTC m=+750.297284692" Sep 29 22:39:06 crc kubenswrapper[4922]: I0929 22:39:06.042974 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-666b84d7d5-zm2nb" podStartSLOduration=3.042952362 podStartE2EDuration="3.042952362s" podCreationTimestamp="2025-09-29 22:39:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:39:06.042675475 +0000 UTC m=+750.352964328" watchObservedRunningTime="2025-09-29 22:39:06.042952362 +0000 UTC m=+750.353241175" Sep 29 22:39:06 crc kubenswrapper[4922]: I0929 22:39:06.432498 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c30379d2-4644-450d-bc35-d6a4c857d840" path="/var/lib/kubelet/pods/c30379d2-4644-450d-bc35-d6a4c857d840/volumes" Sep 29 22:39:06 crc kubenswrapper[4922]: I0929 22:39:06.433525 4922 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc" path="/var/lib/kubelet/pods/c68fe1bd-70ef-4d9d-8163-7eb2bd8e9abc/volumes" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.551936 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rxqq9"] Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.554383 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.571754 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rxqq9"] Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.617360 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-catalog-content\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.617472 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-utilities\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.617629 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9k65\" (UniqueName: \"kubernetes.io/projected/e338c204-f412-4661-a8de-e812dfa80a8d-kube-api-access-x9k65\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.718731 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-catalog-content\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.718801 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-utilities\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.719351 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-catalog-content\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.719474 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-utilities\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: 
I0929 22:39:07.719708 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9k65\" (UniqueName: \"kubernetes.io/projected/e338c204-f412-4661-a8de-e812dfa80a8d-kube-api-access-x9k65\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.745077 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9k65\" (UniqueName: \"kubernetes.io/projected/e338c204-f412-4661-a8de-e812dfa80a8d-kube-api-access-x9k65\") pod \"certified-operators-rxqq9\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:07 crc kubenswrapper[4922]: I0929 22:39:07.876930 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:08 crc kubenswrapper[4922]: I0929 22:39:08.310644 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rxqq9"] Sep 29 22:39:08 crc kubenswrapper[4922]: I0929 22:39:08.993699 4922 generic.go:334] "Generic (PLEG): container finished" podID="e338c204-f412-4661-a8de-e812dfa80a8d" containerID="f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d" exitCode=0 Sep 29 22:39:08 crc kubenswrapper[4922]: I0929 22:39:08.993838 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxqq9" event={"ID":"e338c204-f412-4661-a8de-e812dfa80a8d","Type":"ContainerDied","Data":"f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d"} Sep 29 22:39:08 crc kubenswrapper[4922]: I0929 22:39:08.994494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxqq9" event={"ID":"e338c204-f412-4661-a8de-e812dfa80a8d","Type":"ContainerStarted","Data":"8022b09422921d012110e1440885391f6649091ca746821d5c5ebeb31bf5600a"} Sep 29 22:39:08 crc kubenswrapper[4922]: I0929 22:39:08.997565 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.961854 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tqsst"] Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.962937 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-controller" containerID="cri-o://791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" gracePeriod=30 Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.962964 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="nbdb" containerID="cri-o://59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" gracePeriod=30 Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.963111 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="northd" containerID="cri-o://cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" gracePeriod=30 Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.963183 4922 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" gracePeriod=30 Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.963254 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kube-rbac-proxy-node" containerID="cri-o://46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" gracePeriod=30 Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.963317 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-acl-logging" containerID="cri-o://2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" gracePeriod=30 Sep 29 22:39:09 crc kubenswrapper[4922]: I0929 22:39:09.963381 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="sbdb" containerID="cri-o://ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" gracePeriod=30 Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.008258 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxqq9" event={"ID":"e338c204-f412-4661-a8de-e812dfa80a8d","Type":"ContainerStarted","Data":"5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60"} Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.023755 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" containerID="cri-o://2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" gracePeriod=30 Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.362102 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/3.log" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.366183 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovn-acl-logging/0.log" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.366727 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovn-controller/0.log" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.367240 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.434809 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-8gnqr"] Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435059 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435080 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435092 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kube-rbac-proxy-node" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435100 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kube-rbac-proxy-node" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435109 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kubecfg-setup" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435115 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kubecfg-setup" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435124 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="sbdb" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435129 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="sbdb" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435136 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435142 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435152 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="nbdb" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435158 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="nbdb" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435169 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="northd" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435175 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="northd" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435181 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435187 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435197 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" 
containerName="kube-rbac-proxy-ovn-metrics" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435202 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435209 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-acl-logging" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435215 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-acl-logging" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435224 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435229 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435236 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435243 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435330 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435339 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="nbdb" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435351 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435359 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435368 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="northd" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435376 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-acl-logging" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435383 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="sbdb" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435405 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovn-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435414 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435422 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="kube-rbac-proxy-node" Sep 29 22:39:10 crc kubenswrapper[4922]: E0929 22:39:10.435538 4922 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435550 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435667 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.435889 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerName="ovnkube-controller" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.438124 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462458 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-systemd-units\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462494 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-netns\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462520 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-netd\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462546 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovn-node-metrics-cert\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462562 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-ovn\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462553 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462583 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-kubelet\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462614 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462643 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462662 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462693 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-var-lib-openvswitch\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462729 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-openvswitch\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462773 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-script-lib\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462821 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-slash\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462852 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjrw4\" (UniqueName: \"kubernetes.io/projected/ef991319-1ee8-4778-8567-9b4e8ff7600c-kube-api-access-zjrw4\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc 
kubenswrapper[4922]: I0929 22:39:10.462899 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-log-socket\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462931 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-bin\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462968 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-systemd\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.462956 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463010 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-node-log\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463029 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-slash" (OuterVolumeSpecName: "host-slash") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463044 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-config\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463063 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463078 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-ovn-kubernetes\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463093 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463115 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-env-overrides\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463150 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-etc-openvswitch\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463181 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ef991319-1ee8-4778-8567-9b4e8ff7600c\" (UID: \"ef991319-1ee8-4778-8567-9b4e8ff7600c\") " Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463431 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-ovnkube-config\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463450 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-node-log" (OuterVolumeSpecName: "node-log") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463492 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-kubelet\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463523 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-var-lib-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463553 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463555 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-ovnkube-script-lib\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463632 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8011f344-283f-46aa-9b08-03b53d59d2ed-ovn-node-metrics-cert\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463672 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-systemd-units\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463688 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-run-netns\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463713 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-node-log\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463743 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463758 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-cni-bin\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463792 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-etc-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463819 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-env-overrides\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463842 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-log-socket\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463868 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-slash\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463893 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-ovn\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463917 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-cni-netd\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.463988 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-run-ovn-kubernetes\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464033 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464058 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl8mx\" (UniqueName: \"kubernetes.io/projected/8011f344-283f-46aa-9b08-03b53d59d2ed-kube-api-access-wl8mx\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464084 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-systemd\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464147 4922 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464160 4922 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464169 4922 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464178 4922 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464186 4922 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464195 4922 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464204 4922 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464201 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464265 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464294 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464317 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-log-socket" (OuterVolumeSpecName: "log-socket") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464653 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464685 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464213 4922 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464843 4922 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.464933 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.468743 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.470175 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef991319-1ee8-4778-8567-9b4e8ff7600c-kube-api-access-zjrw4" (OuterVolumeSpecName: "kube-api-access-zjrw4") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "kube-api-access-zjrw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.478929 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "ef991319-1ee8-4778-8567-9b4e8ff7600c" (UID: "ef991319-1ee8-4778-8567-9b4e8ff7600c"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.565925 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-ovnkube-config\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.565963 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-kubelet\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.565981 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-var-lib-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.565996 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-ovnkube-script-lib\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566015 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8011f344-283f-46aa-9b08-03b53d59d2ed-ovn-node-metrics-cert\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566031 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-systemd-units\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566047 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-run-netns\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566065 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-node-log\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566083 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566100 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-cni-bin\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566119 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-etc-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566135 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-env-overrides\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-log-socket\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566147 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-var-lib-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566189 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-slash\") pod \"ovnkube-node-8gnqr\" (UID: 
\"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566199 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-systemd-units\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566223 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-node-log\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566220 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-cni-bin\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566247 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-etc-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566235 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-log-socket\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566213 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-run-netns\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566196 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-kubelet\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566166 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-slash\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566314 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-openvswitch\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566450 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-ovn\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566479 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-cni-netd\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566520 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-run-ovn-kubernetes\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566551 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566577 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl8mx\" (UniqueName: \"kubernetes.io/projected/8011f344-283f-46aa-9b08-03b53d59d2ed-kube-api-access-wl8mx\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566597 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-systemd\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566607 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-cni-netd\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566649 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjrw4\" (UniqueName: \"kubernetes.io/projected/ef991319-1ee8-4778-8567-9b4e8ff7600c-kube-api-access-zjrw4\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566665 4922 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566676 4922 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566687 4922 reconciler_common.go:293] "Volume 
detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566698 4922 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566696 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566708 4922 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566656 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-host-run-ovn-kubernetes\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566750 4922 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566788 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-env-overrides\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566733 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-systemd\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566810 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-ovnkube-config\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566924 4922 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef991319-1ee8-4778-8567-9b4e8ff7600c-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566962 4922 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.566987 4922 reconciler_common.go:293] "Volume 
detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ef991319-1ee8-4778-8567-9b4e8ff7600c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.567011 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ef991319-1ee8-4778-8567-9b4e8ff7600c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.567038 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8011f344-283f-46aa-9b08-03b53d59d2ed-ovnkube-script-lib\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.567093 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8011f344-283f-46aa-9b08-03b53d59d2ed-run-ovn\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.570457 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8011f344-283f-46aa-9b08-03b53d59d2ed-ovn-node-metrics-cert\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.594923 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl8mx\" (UniqueName: \"kubernetes.io/projected/8011f344-283f-46aa-9b08-03b53d59d2ed-kube-api-access-wl8mx\") pod \"ovnkube-node-8gnqr\" (UID: \"8011f344-283f-46aa-9b08-03b53d59d2ed\") " pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.755748 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:10 crc kubenswrapper[4922]: W0929 22:39:10.786452 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8011f344_283f_46aa_9b08_03b53d59d2ed.slice/crio-a08bed71afdc30ca6760601703b2225c65a34b985d6b471a736ba53721aa9654 WatchSource:0}: Error finding container a08bed71afdc30ca6760601703b2225c65a34b985d6b471a736ba53721aa9654: Status 404 returned error can't find the container with id a08bed71afdc30ca6760601703b2225c65a34b985d6b471a736ba53721aa9654 Sep 29 22:39:10 crc kubenswrapper[4922]: I0929 22:39:10.875265 4922 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.014261 4922 generic.go:334] "Generic (PLEG): container finished" podID="8011f344-283f-46aa-9b08-03b53d59d2ed" containerID="71bf82efc541c1b1e2f7311a0c1e2771e1426417950eebdc78349932bb4077d4" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.014584 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerDied","Data":"71bf82efc541c1b1e2f7311a0c1e2771e1426417950eebdc78349932bb4077d4"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.014608 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"a08bed71afdc30ca6760601703b2225c65a34b985d6b471a736ba53721aa9654"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.017506 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/2.log" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.018096 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/1.log" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.018132 4922 generic.go:334] "Generic (PLEG): container finished" podID="6edd2cff-7363-4e99-8cc3-3db297410bce" containerID="491978895dbeae4eb647475fdf5d8fe66c207f8194037b81d47c50656591ebbc" exitCode=2 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.018208 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerDied","Data":"491978895dbeae4eb647475fdf5d8fe66c207f8194037b81d47c50656591ebbc"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.018553 4922 scope.go:117] "RemoveContainer" containerID="940494b8a5f1116e3683509e130a872c932d817f900520c1c5e0dac91b23604e" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.018951 4922 scope.go:117] "RemoveContainer" containerID="491978895dbeae4eb647475fdf5d8fe66c207f8194037b81d47c50656591ebbc" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.020843 4922 generic.go:334] "Generic (PLEG): container finished" podID="e338c204-f412-4661-a8de-e812dfa80a8d" containerID="5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.020910 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxqq9" 
event={"ID":"e338c204-f412-4661-a8de-e812dfa80a8d","Type":"ContainerDied","Data":"5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.028136 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovnkube-controller/3.log" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.031384 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovn-acl-logging/0.log" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.031751 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-tqsst_ef991319-1ee8-4778-8567-9b4e8ff7600c/ovn-controller/0.log" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032282 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032301 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032315 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032323 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032332 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032340 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" exitCode=0 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032346 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" exitCode=143 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032353 4922 generic.go:334] "Generic (PLEG): container finished" podID="ef991319-1ee8-4778-8567-9b4e8ff7600c" containerID="791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" exitCode=143 Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032372 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032421 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} Sep 29 22:39:11 crc 
kubenswrapper[4922]: I0929 22:39:11.032432 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032441 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032450 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032459 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032469 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032481 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032486 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032492 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032497 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032502 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032507 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032512 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032517 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032522 
4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032528 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032550 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032557 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032562 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032567 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032572 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032577 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032581 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032586 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032591 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032596 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032603 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032610 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032616 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032621 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032626 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032631 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032636 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032640 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032646 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032651 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032656 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032663 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" event={"ID":"ef991319-1ee8-4778-8567-9b4e8ff7600c","Type":"ContainerDied","Data":"875e8ea62473009c543bb017a8284b82ff45aae737a659d7fbf6143b33d2fa38"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032671 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032676 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032682 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032687 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032692 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032696 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032701 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032706 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032710 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032716 4922 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.032810 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tqsst" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.078577 4922 scope.go:117] "RemoveContainer" containerID="2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.095270 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tqsst"] Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.100213 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.107861 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tqsst"] Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.124790 4922 scope.go:117] "RemoveContainer" containerID="ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.139643 4922 scope.go:117] "RemoveContainer" containerID="59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.161139 4922 scope.go:117] "RemoveContainer" containerID="cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.174072 4922 scope.go:117] "RemoveContainer" containerID="37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.186595 4922 scope.go:117] "RemoveContainer" containerID="46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.199324 4922 scope.go:117] "RemoveContainer" 
containerID="2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.223348 4922 scope.go:117] "RemoveContainer" containerID="791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.246685 4922 scope.go:117] "RemoveContainer" containerID="3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.271997 4922 scope.go:117] "RemoveContainer" containerID="2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.272381 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": container with ID starting with 2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb not found: ID does not exist" containerID="2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.272423 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} err="failed to get container status \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": rpc error: code = NotFound desc = could not find container \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": container with ID starting with 2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.272441 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.272910 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": container with ID starting with 607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3 not found: ID does not exist" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.272954 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} err="failed to get container status \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": rpc error: code = NotFound desc = could not find container \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": container with ID starting with 607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.272982 4922 scope.go:117] "RemoveContainer" containerID="ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.273253 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": container with ID starting with ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b not found: ID does not exist" containerID="ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" 
Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.273280 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} err="failed to get container status \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": rpc error: code = NotFound desc = could not find container \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": container with ID starting with ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.273296 4922 scope.go:117] "RemoveContainer" containerID="59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.273566 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": container with ID starting with 59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153 not found: ID does not exist" containerID="59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.273591 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} err="failed to get container status \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": rpc error: code = NotFound desc = could not find container \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": container with ID starting with 59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.273608 4922 scope.go:117] "RemoveContainer" containerID="cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.273813 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": container with ID starting with cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a not found: ID does not exist" containerID="cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.273840 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} err="failed to get container status \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": rpc error: code = NotFound desc = could not find container \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": container with ID starting with cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.273856 4922 scope.go:117] "RemoveContainer" containerID="37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.274081 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": container with ID starting with 
37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78 not found: ID does not exist" containerID="37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.274115 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} err="failed to get container status \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": rpc error: code = NotFound desc = could not find container \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": container with ID starting with 37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.274137 4922 scope.go:117] "RemoveContainer" containerID="46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.274353 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": container with ID starting with 46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0 not found: ID does not exist" containerID="46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.274375 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} err="failed to get container status \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": rpc error: code = NotFound desc = could not find container \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": container with ID starting with 46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.274401 4922 scope.go:117] "RemoveContainer" containerID="2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.274652 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": container with ID starting with 2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad not found: ID does not exist" containerID="2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.274673 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} err="failed to get container status \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": rpc error: code = NotFound desc = could not find container \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": container with ID starting with 2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.274684 4922 scope.go:117] "RemoveContainer" containerID="791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.274945 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": container with ID starting with 791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499 not found: ID does not exist" containerID="791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.274993 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} err="failed to get container status \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": rpc error: code = NotFound desc = could not find container \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": container with ID starting with 791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.275021 4922 scope.go:117] "RemoveContainer" containerID="3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f" Sep 29 22:39:11 crc kubenswrapper[4922]: E0929 22:39:11.275453 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": container with ID starting with 3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f not found: ID does not exist" containerID="3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.275479 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} err="failed to get container status \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": rpc error: code = NotFound desc = could not find container \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": container with ID starting with 3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.275492 4922 scope.go:117] "RemoveContainer" containerID="2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.275738 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} err="failed to get container status \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": rpc error: code = NotFound desc = could not find container \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": container with ID starting with 2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.275764 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.276129 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} err="failed to get container status \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": rpc error: code = NotFound desc = could not find container 
\"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": container with ID starting with 607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.276150 4922 scope.go:117] "RemoveContainer" containerID="ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.276501 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} err="failed to get container status \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": rpc error: code = NotFound desc = could not find container \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": container with ID starting with ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.276526 4922 scope.go:117] "RemoveContainer" containerID="59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.276780 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} err="failed to get container status \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": rpc error: code = NotFound desc = could not find container \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": container with ID starting with 59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.276800 4922 scope.go:117] "RemoveContainer" containerID="cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277003 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} err="failed to get container status \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": rpc error: code = NotFound desc = could not find container \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": container with ID starting with cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277029 4922 scope.go:117] "RemoveContainer" containerID="37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277263 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} err="failed to get container status \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": rpc error: code = NotFound desc = could not find container \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": container with ID starting with 37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277288 4922 scope.go:117] "RemoveContainer" containerID="46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277582 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} err="failed to get container status \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": rpc error: code = NotFound desc = could not find container \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": container with ID starting with 46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277602 4922 scope.go:117] "RemoveContainer" containerID="2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277861 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} err="failed to get container status \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": rpc error: code = NotFound desc = could not find container \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": container with ID starting with 2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.277880 4922 scope.go:117] "RemoveContainer" containerID="791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278101 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} err="failed to get container status \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": rpc error: code = NotFound desc = could not find container \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": container with ID starting with 791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278139 4922 scope.go:117] "RemoveContainer" containerID="3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278408 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} err="failed to get container status \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": rpc error: code = NotFound desc = could not find container \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": container with ID starting with 3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278426 4922 scope.go:117] "RemoveContainer" containerID="2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278694 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} err="failed to get container status \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": rpc error: code = NotFound desc = could not find container \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": container with ID starting with 
2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278714 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278942 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} err="failed to get container status \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": rpc error: code = NotFound desc = could not find container \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": container with ID starting with 607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.278968 4922 scope.go:117] "RemoveContainer" containerID="ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.279201 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} err="failed to get container status \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": rpc error: code = NotFound desc = could not find container \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": container with ID starting with ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.279220 4922 scope.go:117] "RemoveContainer" containerID="59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.279500 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} err="failed to get container status \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": rpc error: code = NotFound desc = could not find container \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": container with ID starting with 59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.279525 4922 scope.go:117] "RemoveContainer" containerID="cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.279758 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} err="failed to get container status \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": rpc error: code = NotFound desc = could not find container \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": container with ID starting with cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.279777 4922 scope.go:117] "RemoveContainer" containerID="37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.280313 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} err="failed to get container status \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": rpc error: code = NotFound desc = could not find container \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": container with ID starting with 37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.280342 4922 scope.go:117] "RemoveContainer" containerID="46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.280952 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} err="failed to get container status \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": rpc error: code = NotFound desc = could not find container \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": container with ID starting with 46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.280975 4922 scope.go:117] "RemoveContainer" containerID="2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.281200 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} err="failed to get container status \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": rpc error: code = NotFound desc = could not find container \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": container with ID starting with 2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.281224 4922 scope.go:117] "RemoveContainer" containerID="791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.281521 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} err="failed to get container status \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": rpc error: code = NotFound desc = could not find container \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": container with ID starting with 791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.281544 4922 scope.go:117] "RemoveContainer" containerID="3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.281762 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} err="failed to get container status \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": rpc error: code = NotFound desc = could not find container \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": container with ID starting with 3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f not found: ID does not exist" Sep 
29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.281785 4922 scope.go:117] "RemoveContainer" containerID="2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282031 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb"} err="failed to get container status \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": rpc error: code = NotFound desc = could not find container \"2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb\": container with ID starting with 2bac23b7c7fb4fbdc5d3ae48ae92fe8b2899c02c2d92aba5ed22754925a7eeeb not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282064 4922 scope.go:117] "RemoveContainer" containerID="607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282308 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3"} err="failed to get container status \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": rpc error: code = NotFound desc = could not find container \"607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3\": container with ID starting with 607636f5b83a39949abb340bdd9ad1b6d26ca18a10db8f030040e487a8f94ae3 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282330 4922 scope.go:117] "RemoveContainer" containerID="ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282595 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b"} err="failed to get container status \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": rpc error: code = NotFound desc = could not find container \"ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b\": container with ID starting with ae71c2e32d580d9a06f4d1c5a06319ebeb02eaf9a9a0ab81b590632f7663e37b not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282617 4922 scope.go:117] "RemoveContainer" containerID="59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282801 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153"} err="failed to get container status \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": rpc error: code = NotFound desc = could not find container \"59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153\": container with ID starting with 59a2f407a85af60059092f0d99b4ad123e5c9eaf8506ec3746d096e88ac33153 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.282823 4922 scope.go:117] "RemoveContainer" containerID="cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.283044 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a"} err="failed to get container status 
\"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": rpc error: code = NotFound desc = could not find container \"cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a\": container with ID starting with cfacd062a5eac1c7b1809c6d9ac1ca06e4d4ba17e29121eeb3e386b69875bc9a not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.283063 4922 scope.go:117] "RemoveContainer" containerID="37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.283374 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78"} err="failed to get container status \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": rpc error: code = NotFound desc = could not find container \"37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78\": container with ID starting with 37426d77004ab55c49e0c42270e9b8eeb96adc30274b8c80bf34786ca2001b78 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.283406 4922 scope.go:117] "RemoveContainer" containerID="46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.283880 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0"} err="failed to get container status \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": rpc error: code = NotFound desc = could not find container \"46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0\": container with ID starting with 46dc115c6bacd573bb09bef53213e9e06e4125736dcc7f875c7793929ebb89c0 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.283902 4922 scope.go:117] "RemoveContainer" containerID="2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.284259 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad"} err="failed to get container status \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": rpc error: code = NotFound desc = could not find container \"2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad\": container with ID starting with 2af1c2ab0716abb9d593bad509d1f56bd7f8803fa8dd236c8e00bf66a468c8ad not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.284283 4922 scope.go:117] "RemoveContainer" containerID="791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.284615 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499"} err="failed to get container status \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": rpc error: code = NotFound desc = could not find container \"791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499\": container with ID starting with 791330528a33f98fde440c0195ed1e4eb3dda07c51856a9b7f836a0c563bf499 not found: ID does not exist" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.284634 4922 scope.go:117] "RemoveContainer" 
containerID="3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f" Sep 29 22:39:11 crc kubenswrapper[4922]: I0929 22:39:11.284949 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f"} err="failed to get container status \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": rpc error: code = NotFound desc = could not find container \"3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f\": container with ID starting with 3d5cc94863fc0a1360e01f1da0d56a45d4bda2de1c3c9e469f0d8bf585d41a8f not found: ID does not exist" Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.046696 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxqq9" event={"ID":"e338c204-f412-4661-a8de-e812dfa80a8d","Type":"ContainerStarted","Data":"9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.052014 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-md9pf_6edd2cff-7363-4e99-8cc3-3db297410bce/kube-multus/2.log" Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.052119 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-md9pf" event={"ID":"6edd2cff-7363-4e99-8cc3-3db297410bce","Type":"ContainerStarted","Data":"0ac0956c07a20dbc52b16e8388b31df9ca4d288ad47a0f66d4b245afb61cbf3d"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.056415 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"133d13e32bc11d66da7529ff44b4fc24a7b0869c377721e97c04fcd942353f7f"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.056500 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"cf3a60d73be2ef8caddd7fdeac1ca69787cf5f4ea0c146afcf41a848b41e3879"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.056524 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"612f32ec9d422a613421740cb5847acec8ba1f42d64639b5e2f4b8942a52d572"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.056542 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"71352d6270b142b6a02867f25ca54d6333cdae43b40400413ac2ddbb354545ea"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.056559 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"4af3de14ef89b92fd1a650a439142af7c99c4b21faa74ffee0dbb8ee1dea2f83"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.056578 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"b02e95814249a1600d98dbc8d7ea5b2dc187e79a36981339f127edc8a05c7159"} Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.073426 4922 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/certified-operators-rxqq9" podStartSLOduration=2.545807956 podStartE2EDuration="5.073380917s" podCreationTimestamp="2025-09-29 22:39:07 +0000 UTC" firstStartedPulling="2025-09-29 22:39:08.996920366 +0000 UTC m=+753.307209209" lastFinishedPulling="2025-09-29 22:39:11.524493347 +0000 UTC m=+755.834782170" observedRunningTime="2025-09-29 22:39:12.073164891 +0000 UTC m=+756.383453704" watchObservedRunningTime="2025-09-29 22:39:12.073380917 +0000 UTC m=+756.383669760" Sep 29 22:39:12 crc kubenswrapper[4922]: I0929 22:39:12.431533 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef991319-1ee8-4778-8567-9b4e8ff7600c" path="/var/lib/kubelet/pods/ef991319-1ee8-4778-8567-9b4e8ff7600c/volumes" Sep 29 22:39:14 crc kubenswrapper[4922]: I0929 22:39:14.072695 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"8a3bc9e414b244d1112d9233820530af467441e752eb5b9e8aece4cdbc3549b0"} Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.108655 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" event={"ID":"8011f344-283f-46aa-9b08-03b53d59d2ed","Type":"ContainerStarted","Data":"9069a847da485c3894a4fcec200ee1a067af76c8e48f82691bee56788dcb0402"} Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.110376 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.110420 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.110500 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.150555 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.153116 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" podStartSLOduration=7.153097514 podStartE2EDuration="7.153097514s" podCreationTimestamp="2025-09-29 22:39:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:39:17.14888996 +0000 UTC m=+761.459178793" watchObservedRunningTime="2025-09-29 22:39:17.153097514 +0000 UTC m=+761.463386337" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.160291 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.877713 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.878116 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:17 crc kubenswrapper[4922]: I0929 22:39:17.947358 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.192085 4922 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.249880 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rxqq9"] Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.881619 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-rrsl6"] Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.882966 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.885449 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.887357 4922 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-w4jls" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.887708 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.893685 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.896329 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-rrsl6"] Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.989094 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-node-mnt\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.989220 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjqhj\" (UniqueName: \"kubernetes.io/projected/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-kube-api-access-hjqhj\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:18 crc kubenswrapper[4922]: I0929 22:39:18.989260 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-crc-storage\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: I0929 22:39:19.090472 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-crc-storage\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: I0929 22:39:19.090572 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-node-mnt\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: I0929 22:39:19.090676 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjqhj\" (UniqueName: 
\"kubernetes.io/projected/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-kube-api-access-hjqhj\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: I0929 22:39:19.091761 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-node-mnt\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: I0929 22:39:19.092109 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-crc-storage\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: I0929 22:39:19.126041 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjqhj\" (UniqueName: \"kubernetes.io/projected/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-kube-api-access-hjqhj\") pod \"crc-storage-crc-rrsl6\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: I0929 22:39:19.210144 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: E0929 22:39:19.249855 4922 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(a6d95321a775c4707df546849a8ce328f05f5ecbdc6f389330f3cbacb679203c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 22:39:19 crc kubenswrapper[4922]: E0929 22:39:19.249947 4922 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(a6d95321a775c4707df546849a8ce328f05f5ecbdc6f389330f3cbacb679203c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: E0929 22:39:19.249973 4922 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(a6d95321a775c4707df546849a8ce328f05f5ecbdc6f389330f3cbacb679203c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:19 crc kubenswrapper[4922]: E0929 22:39:19.250056 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-rrsl6_crc-storage(74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-rrsl6_crc-storage(74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(a6d95321a775c4707df546849a8ce328f05f5ecbdc6f389330f3cbacb679203c): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-rrsl6" podUID="74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.127510 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.127706 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rxqq9" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="registry-server" containerID="cri-o://9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6" gracePeriod=2 Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.129833 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:20 crc kubenswrapper[4922]: E0929 22:39:20.167368 4922 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(11d916846af9b5bc61d542a89dfbe46030e4ed06ee43b5276c25011e176008cf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 22:39:20 crc kubenswrapper[4922]: E0929 22:39:20.167486 4922 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(11d916846af9b5bc61d542a89dfbe46030e4ed06ee43b5276c25011e176008cf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:20 crc kubenswrapper[4922]: E0929 22:39:20.167525 4922 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(11d916846af9b5bc61d542a89dfbe46030e4ed06ee43b5276c25011e176008cf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:20 crc kubenswrapper[4922]: E0929 22:39:20.167603 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-rrsl6_crc-storage(74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-rrsl6_crc-storage(74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-rrsl6_crc-storage_74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85_0(11d916846af9b5bc61d542a89dfbe46030e4ed06ee43b5276c25011e176008cf): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-rrsl6" podUID="74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.335142 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.416013 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-catalog-content\") pod \"e338c204-f412-4661-a8de-e812dfa80a8d\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.416091 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-utilities\") pod \"e338c204-f412-4661-a8de-e812dfa80a8d\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.416130 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9k65\" (UniqueName: \"kubernetes.io/projected/e338c204-f412-4661-a8de-e812dfa80a8d-kube-api-access-x9k65\") pod \"e338c204-f412-4661-a8de-e812dfa80a8d\" (UID: \"e338c204-f412-4661-a8de-e812dfa80a8d\") " Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.417486 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-utilities" (OuterVolumeSpecName: "utilities") pod "e338c204-f412-4661-a8de-e812dfa80a8d" (UID: "e338c204-f412-4661-a8de-e812dfa80a8d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.424284 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e338c204-f412-4661-a8de-e812dfa80a8d-kube-api-access-x9k65" (OuterVolumeSpecName: "kube-api-access-x9k65") pod "e338c204-f412-4661-a8de-e812dfa80a8d" (UID: "e338c204-f412-4661-a8de-e812dfa80a8d"). InnerVolumeSpecName "kube-api-access-x9k65". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.518072 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.518112 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9k65\" (UniqueName: \"kubernetes.io/projected/e338c204-f412-4661-a8de-e812dfa80a8d-kube-api-access-x9k65\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.699610 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e338c204-f412-4661-a8de-e812dfa80a8d" (UID: "e338c204-f412-4661-a8de-e812dfa80a8d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.721759 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e338c204-f412-4661-a8de-e812dfa80a8d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.815552 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8fxhg"] Sep 29 22:39:20 crc kubenswrapper[4922]: E0929 22:39:20.816326 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="registry-server" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.816349 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="registry-server" Sep 29 22:39:20 crc kubenswrapper[4922]: E0929 22:39:20.816384 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="extract-content" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.816403 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="extract-content" Sep 29 22:39:20 crc kubenswrapper[4922]: E0929 22:39:20.816447 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="extract-utilities" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.816461 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="extract-utilities" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.816636 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" containerName="registry-server" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.818228 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.818986 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8fxhg"] Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.924756 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7hnf\" (UniqueName: \"kubernetes.io/projected/a4aa042b-4062-4fec-a47b-d9d3fac94615-kube-api-access-q7hnf\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.924939 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-utilities\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:20 crc kubenswrapper[4922]: I0929 22:39:20.924980 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-catalog-content\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.026160 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7hnf\" (UniqueName: \"kubernetes.io/projected/a4aa042b-4062-4fec-a47b-d9d3fac94615-kube-api-access-q7hnf\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.026316 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-utilities\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.026357 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-catalog-content\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.027029 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-utilities\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.027124 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-catalog-content\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.049066 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-q7hnf\" (UniqueName: \"kubernetes.io/projected/a4aa042b-4062-4fec-a47b-d9d3fac94615-kube-api-access-q7hnf\") pod \"community-operators-8fxhg\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.136017 4922 generic.go:334] "Generic (PLEG): container finished" podID="e338c204-f412-4661-a8de-e812dfa80a8d" containerID="9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6" exitCode=0 Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.136074 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxqq9" event={"ID":"e338c204-f412-4661-a8de-e812dfa80a8d","Type":"ContainerDied","Data":"9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6"} Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.136104 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rxqq9" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.136135 4922 scope.go:117] "RemoveContainer" containerID="9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.136120 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rxqq9" event={"ID":"e338c204-f412-4661-a8de-e812dfa80a8d","Type":"ContainerDied","Data":"8022b09422921d012110e1440885391f6649091ca746821d5c5ebeb31bf5600a"} Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.143462 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.152806 4922 scope.go:117] "RemoveContainer" containerID="5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.242529 4922 scope.go:117] "RemoveContainer" containerID="f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.245708 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rxqq9"] Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.248182 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rxqq9"] Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.276615 4922 scope.go:117] "RemoveContainer" containerID="9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6" Sep 29 22:39:21 crc kubenswrapper[4922]: E0929 22:39:21.277548 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6\": container with ID starting with 9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6 not found: ID does not exist" containerID="9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.277580 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6"} err="failed to get container status \"9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6\": rpc error: code = NotFound desc = could not find container 
\"9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6\": container with ID starting with 9ed9aa71e2c67927d816fc1937a4f914b96cac2be3f431f3af3b76ac405743a6 not found: ID does not exist" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.277599 4922 scope.go:117] "RemoveContainer" containerID="5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60" Sep 29 22:39:21 crc kubenswrapper[4922]: E0929 22:39:21.279007 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60\": container with ID starting with 5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60 not found: ID does not exist" containerID="5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.279034 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60"} err="failed to get container status \"5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60\": rpc error: code = NotFound desc = could not find container \"5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60\": container with ID starting with 5337b347e02edf9b536b8716a958c0cf2e3c088c5ff8aba581dc28c0049b4f60 not found: ID does not exist" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.279049 4922 scope.go:117] "RemoveContainer" containerID="f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d" Sep 29 22:39:21 crc kubenswrapper[4922]: E0929 22:39:21.279517 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d\": container with ID starting with f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d not found: ID does not exist" containerID="f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.279739 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d"} err="failed to get container status \"f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d\": rpc error: code = NotFound desc = could not find container \"f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d\": container with ID starting with f5b847a7e73f908bfe450d7d815662ede3b697a7f507bcb2e8afcbab44e43d5d not found: ID does not exist" Sep 29 22:39:21 crc kubenswrapper[4922]: I0929 22:39:21.642018 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8fxhg"] Sep 29 22:39:22 crc kubenswrapper[4922]: I0929 22:39:22.147310 4922 generic.go:334] "Generic (PLEG): container finished" podID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerID="148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d" exitCode=0 Sep 29 22:39:22 crc kubenswrapper[4922]: I0929 22:39:22.147372 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8fxhg" event={"ID":"a4aa042b-4062-4fec-a47b-d9d3fac94615","Type":"ContainerDied","Data":"148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d"} Sep 29 22:39:22 crc kubenswrapper[4922]: I0929 22:39:22.147438 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-8fxhg" event={"ID":"a4aa042b-4062-4fec-a47b-d9d3fac94615","Type":"ContainerStarted","Data":"9bd06b393850971adf5f77297b737972c0dfd16ed7875d0cfc297f5b74cfd70d"} Sep 29 22:39:22 crc kubenswrapper[4922]: I0929 22:39:22.431529 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e338c204-f412-4661-a8de-e812dfa80a8d" path="/var/lib/kubelet/pods/e338c204-f412-4661-a8de-e812dfa80a8d/volumes" Sep 29 22:39:23 crc kubenswrapper[4922]: I0929 22:39:23.159488 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8fxhg" event={"ID":"a4aa042b-4062-4fec-a47b-d9d3fac94615","Type":"ContainerStarted","Data":"0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3"} Sep 29 22:39:24 crc kubenswrapper[4922]: I0929 22:39:24.170298 4922 generic.go:334] "Generic (PLEG): container finished" podID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerID="0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3" exitCode=0 Sep 29 22:39:24 crc kubenswrapper[4922]: I0929 22:39:24.170360 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8fxhg" event={"ID":"a4aa042b-4062-4fec-a47b-d9d3fac94615","Type":"ContainerDied","Data":"0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3"} Sep 29 22:39:25 crc kubenswrapper[4922]: I0929 22:39:25.181915 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8fxhg" event={"ID":"a4aa042b-4062-4fec-a47b-d9d3fac94615","Type":"ContainerStarted","Data":"57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022"} Sep 29 22:39:25 crc kubenswrapper[4922]: I0929 22:39:25.214126 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8fxhg" podStartSLOduration=2.496005455 podStartE2EDuration="5.214093361s" podCreationTimestamp="2025-09-29 22:39:20 +0000 UTC" firstStartedPulling="2025-09-29 22:39:22.150195562 +0000 UTC m=+766.460484415" lastFinishedPulling="2025-09-29 22:39:24.868283468 +0000 UTC m=+769.178572321" observedRunningTime="2025-09-29 22:39:25.206895143 +0000 UTC m=+769.517183996" watchObservedRunningTime="2025-09-29 22:39:25.214093361 +0000 UTC m=+769.524382204" Sep 29 22:39:28 crc kubenswrapper[4922]: I0929 22:39:28.912923 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:39:28 crc kubenswrapper[4922]: I0929 22:39:28.913016 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.143884 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.143997 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.211772 4922 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.274211 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.421640 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.422261 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.454865 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8fxhg"] Sep 29 22:39:31 crc kubenswrapper[4922]: I0929 22:39:31.900896 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-rrsl6"] Sep 29 22:39:32 crc kubenswrapper[4922]: I0929 22:39:32.229285 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-rrsl6" event={"ID":"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85","Type":"ContainerStarted","Data":"84553332f9b5b12632e4c248cbb10333fc54add2f5d1a39bc5330890f234c2dc"} Sep 29 22:39:33 crc kubenswrapper[4922]: I0929 22:39:33.234944 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8fxhg" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="registry-server" containerID="cri-o://57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022" gracePeriod=2 Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.219775 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.250437 4922 generic.go:334] "Generic (PLEG): container finished" podID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerID="57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022" exitCode=0 Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.250484 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8fxhg" event={"ID":"a4aa042b-4062-4fec-a47b-d9d3fac94615","Type":"ContainerDied","Data":"57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022"} Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.250537 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8fxhg" event={"ID":"a4aa042b-4062-4fec-a47b-d9d3fac94615","Type":"ContainerDied","Data":"9bd06b393850971adf5f77297b737972c0dfd16ed7875d0cfc297f5b74cfd70d"} Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.250563 4922 scope.go:117] "RemoveContainer" containerID="57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.250584 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8fxhg" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.254318 4922 generic.go:334] "Generic (PLEG): container finished" podID="74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" containerID="e5a6270e8307b919f32a818b901ebb752e448dbb0d51061f483347e948fddfd9" exitCode=0 Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.254374 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-rrsl6" event={"ID":"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85","Type":"ContainerDied","Data":"e5a6270e8307b919f32a818b901ebb752e448dbb0d51061f483347e948fddfd9"} Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.274410 4922 scope.go:117] "RemoveContainer" containerID="0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.293547 4922 scope.go:117] "RemoveContainer" containerID="148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.309610 4922 scope.go:117] "RemoveContainer" containerID="57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022" Sep 29 22:39:34 crc kubenswrapper[4922]: E0929 22:39:34.310178 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022\": container with ID starting with 57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022 not found: ID does not exist" containerID="57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.310219 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022"} err="failed to get container status \"57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022\": rpc error: code = NotFound desc = could not find container \"57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022\": container with ID starting with 57f524808043fd6568def6998895ceb7c8099fdbb1f5deac78c7a2b221efa022 not found: ID does not exist" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.310246 4922 scope.go:117] "RemoveContainer" containerID="0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3" Sep 29 22:39:34 crc kubenswrapper[4922]: E0929 22:39:34.310718 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3\": container with ID starting with 0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3 not found: ID does not exist" containerID="0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.310738 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3"} err="failed to get container status \"0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3\": rpc error: code = NotFound desc = could not find container \"0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3\": container with ID starting with 0adf28bf84beb18d0f9d734a1b3f77627cf769f13bf88cc09786ce00434766c3 not found: ID does not exist" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.310749 4922 
scope.go:117] "RemoveContainer" containerID="148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d" Sep 29 22:39:34 crc kubenswrapper[4922]: E0929 22:39:34.311176 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d\": container with ID starting with 148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d not found: ID does not exist" containerID="148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.311239 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d"} err="failed to get container status \"148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d\": rpc error: code = NotFound desc = could not find container \"148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d\": container with ID starting with 148c85d5b8a3712db5771c44f32ab5b847e57deb279119405404f9b643eb764d not found: ID does not exist" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.340245 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7hnf\" (UniqueName: \"kubernetes.io/projected/a4aa042b-4062-4fec-a47b-d9d3fac94615-kube-api-access-q7hnf\") pod \"a4aa042b-4062-4fec-a47b-d9d3fac94615\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.340435 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-utilities\") pod \"a4aa042b-4062-4fec-a47b-d9d3fac94615\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.340484 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-catalog-content\") pod \"a4aa042b-4062-4fec-a47b-d9d3fac94615\" (UID: \"a4aa042b-4062-4fec-a47b-d9d3fac94615\") " Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.341987 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-utilities" (OuterVolumeSpecName: "utilities") pod "a4aa042b-4062-4fec-a47b-d9d3fac94615" (UID: "a4aa042b-4062-4fec-a47b-d9d3fac94615"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.345080 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4aa042b-4062-4fec-a47b-d9d3fac94615-kube-api-access-q7hnf" (OuterVolumeSpecName: "kube-api-access-q7hnf") pod "a4aa042b-4062-4fec-a47b-d9d3fac94615" (UID: "a4aa042b-4062-4fec-a47b-d9d3fac94615"). InnerVolumeSpecName "kube-api-access-q7hnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.386664 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4aa042b-4062-4fec-a47b-d9d3fac94615" (UID: "a4aa042b-4062-4fec-a47b-d9d3fac94615"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.441599 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.441631 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4aa042b-4062-4fec-a47b-d9d3fac94615-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.441643 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7hnf\" (UniqueName: \"kubernetes.io/projected/a4aa042b-4062-4fec-a47b-d9d3fac94615-kube-api-access-q7hnf\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.578522 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8fxhg"] Sep 29 22:39:34 crc kubenswrapper[4922]: I0929 22:39:34.590255 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8fxhg"] Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.669808 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.757356 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-node-mnt\") pod \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.757496 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjqhj\" (UniqueName: \"kubernetes.io/projected/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-kube-api-access-hjqhj\") pod \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.757544 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" (UID: "74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.757603 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-crc-storage\") pod \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\" (UID: \"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85\") " Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.757821 4922 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-node-mnt\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.768601 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-kube-api-access-hjqhj" (OuterVolumeSpecName: "kube-api-access-hjqhj") pod "74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" (UID: "74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85"). InnerVolumeSpecName "kube-api-access-hjqhj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.781050 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" (UID: "74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.859322 4922 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-crc-storage\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:35 crc kubenswrapper[4922]: I0929 22:39:35.859363 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjqhj\" (UniqueName: \"kubernetes.io/projected/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85-kube-api-access-hjqhj\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:36 crc kubenswrapper[4922]: I0929 22:39:36.271841 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-rrsl6" event={"ID":"74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85","Type":"ContainerDied","Data":"84553332f9b5b12632e4c248cbb10333fc54add2f5d1a39bc5330890f234c2dc"} Sep 29 22:39:36 crc kubenswrapper[4922]: I0929 22:39:36.271882 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84553332f9b5b12632e4c248cbb10333fc54add2f5d1a39bc5330890f234c2dc" Sep 29 22:39:36 crc kubenswrapper[4922]: I0929 22:39:36.271914 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-rrsl6" Sep 29 22:39:36 crc kubenswrapper[4922]: I0929 22:39:36.434128 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" path="/var/lib/kubelet/pods/a4aa042b-4062-4fec-a47b-d9d3fac94615/volumes" Sep 29 22:39:36 crc kubenswrapper[4922]: I0929 22:39:36.749964 4922 scope.go:117] "RemoveContainer" containerID="7c9e93d5db38ee545214c0080870b9c33a58f1b0719e9e49cb2830160116864e" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.269473 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qhkpl"] Sep 29 22:39:37 crc kubenswrapper[4922]: E0929 22:39:37.269834 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="extract-content" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.269866 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="extract-content" Sep 29 22:39:37 crc kubenswrapper[4922]: E0929 22:39:37.269885 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="registry-server" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.269897 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="registry-server" Sep 29 22:39:37 crc kubenswrapper[4922]: E0929 22:39:37.269926 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="extract-utilities" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.269940 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="extract-utilities" Sep 29 22:39:37 
crc kubenswrapper[4922]: E0929 22:39:37.269956 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" containerName="storage" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.269968 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" containerName="storage" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.270129 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" containerName="storage" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.270152 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4aa042b-4062-4fec-a47b-d9d3fac94615" containerName="registry-server" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.275999 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.288944 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhkpl"] Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.381574 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvx7h\" (UniqueName: \"kubernetes.io/projected/ba5b228e-b34d-4a19-8137-3b449bb4233f-kube-api-access-vvx7h\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.381798 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-utilities\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.381925 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-catalog-content\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.483891 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvx7h\" (UniqueName: \"kubernetes.io/projected/ba5b228e-b34d-4a19-8137-3b449bb4233f-kube-api-access-vvx7h\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.483966 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-utilities\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.484053 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-catalog-content\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" 
Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.484946 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-catalog-content\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.485364 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-utilities\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.517330 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvx7h\" (UniqueName: \"kubernetes.io/projected/ba5b228e-b34d-4a19-8137-3b449bb4233f-kube-api-access-vvx7h\") pod \"redhat-marketplace-qhkpl\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:37 crc kubenswrapper[4922]: I0929 22:39:37.611167 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.130620 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhkpl"] Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.288286 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhkpl" event={"ID":"ba5b228e-b34d-4a19-8137-3b449bb4233f","Type":"ContainerStarted","Data":"792adac33abc1f9f6ea390da1496ce79bd2e6393e271a961e8c7e1d51125656e"} Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.674611 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lff66"] Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.676643 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.686610 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lff66"] Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.803238 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-utilities\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.803572 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqrxc\" (UniqueName: \"kubernetes.io/projected/b0fde429-939b-44db-a3dd-89618352d84d-kube-api-access-mqrxc\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.803810 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-catalog-content\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.905257 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-catalog-content\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.905701 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqrxc\" (UniqueName: \"kubernetes.io/projected/b0fde429-939b-44db-a3dd-89618352d84d-kube-api-access-mqrxc\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.905914 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-utilities\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.906155 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-catalog-content\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.906587 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-utilities\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:38 crc kubenswrapper[4922]: I0929 22:39:38.931201 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mqrxc\" (UniqueName: \"kubernetes.io/projected/b0fde429-939b-44db-a3dd-89618352d84d-kube-api-access-mqrxc\") pod \"redhat-operators-lff66\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:39 crc kubenswrapper[4922]: I0929 22:39:39.003024 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:39 crc kubenswrapper[4922]: I0929 22:39:39.301221 4922 generic.go:334] "Generic (PLEG): container finished" podID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerID="0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7" exitCode=0 Sep 29 22:39:39 crc kubenswrapper[4922]: I0929 22:39:39.301282 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhkpl" event={"ID":"ba5b228e-b34d-4a19-8137-3b449bb4233f","Type":"ContainerDied","Data":"0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7"} Sep 29 22:39:39 crc kubenswrapper[4922]: I0929 22:39:39.464539 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lff66"] Sep 29 22:39:39 crc kubenswrapper[4922]: W0929 22:39:39.472885 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb0fde429_939b_44db_a3dd_89618352d84d.slice/crio-89e7de1719120bd7ed217a9a246cd0dafdc1b32f26a3a1613174a90dfe4e0d91 WatchSource:0}: Error finding container 89e7de1719120bd7ed217a9a246cd0dafdc1b32f26a3a1613174a90dfe4e0d91: Status 404 returned error can't find the container with id 89e7de1719120bd7ed217a9a246cd0dafdc1b32f26a3a1613174a90dfe4e0d91 Sep 29 22:39:40 crc kubenswrapper[4922]: I0929 22:39:40.312262 4922 generic.go:334] "Generic (PLEG): container finished" podID="b0fde429-939b-44db-a3dd-89618352d84d" containerID="05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076" exitCode=0 Sep 29 22:39:40 crc kubenswrapper[4922]: I0929 22:39:40.312347 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lff66" event={"ID":"b0fde429-939b-44db-a3dd-89618352d84d","Type":"ContainerDied","Data":"05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076"} Sep 29 22:39:40 crc kubenswrapper[4922]: I0929 22:39:40.313785 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lff66" event={"ID":"b0fde429-939b-44db-a3dd-89618352d84d","Type":"ContainerStarted","Data":"89e7de1719120bd7ed217a9a246cd0dafdc1b32f26a3a1613174a90dfe4e0d91"} Sep 29 22:39:40 crc kubenswrapper[4922]: I0929 22:39:40.319793 4922 generic.go:334] "Generic (PLEG): container finished" podID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerID="1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10" exitCode=0 Sep 29 22:39:40 crc kubenswrapper[4922]: I0929 22:39:40.319854 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhkpl" event={"ID":"ba5b228e-b34d-4a19-8137-3b449bb4233f","Type":"ContainerDied","Data":"1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10"} Sep 29 22:39:40 crc kubenswrapper[4922]: I0929 22:39:40.791802 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-8gnqr" Sep 29 22:39:41 crc kubenswrapper[4922]: I0929 22:39:41.328621 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-qhkpl" event={"ID":"ba5b228e-b34d-4a19-8137-3b449bb4233f","Type":"ContainerStarted","Data":"e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222"} Sep 29 22:39:41 crc kubenswrapper[4922]: I0929 22:39:41.333293 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lff66" event={"ID":"b0fde429-939b-44db-a3dd-89618352d84d","Type":"ContainerStarted","Data":"7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d"} Sep 29 22:39:41 crc kubenswrapper[4922]: I0929 22:39:41.356965 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qhkpl" podStartSLOduration=2.863079374 podStartE2EDuration="4.356924017s" podCreationTimestamp="2025-09-29 22:39:37 +0000 UTC" firstStartedPulling="2025-09-29 22:39:39.302916453 +0000 UTC m=+783.613205266" lastFinishedPulling="2025-09-29 22:39:40.796761046 +0000 UTC m=+785.107049909" observedRunningTime="2025-09-29 22:39:41.355915813 +0000 UTC m=+785.666204666" watchObservedRunningTime="2025-09-29 22:39:41.356924017 +0000 UTC m=+785.667212870" Sep 29 22:39:42 crc kubenswrapper[4922]: I0929 22:39:42.349506 4922 generic.go:334] "Generic (PLEG): container finished" podID="b0fde429-939b-44db-a3dd-89618352d84d" containerID="7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d" exitCode=0 Sep 29 22:39:42 crc kubenswrapper[4922]: I0929 22:39:42.349634 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lff66" event={"ID":"b0fde429-939b-44db-a3dd-89618352d84d","Type":"ContainerDied","Data":"7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d"} Sep 29 22:39:43 crc kubenswrapper[4922]: I0929 22:39:43.358788 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lff66" event={"ID":"b0fde429-939b-44db-a3dd-89618352d84d","Type":"ContainerStarted","Data":"ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee"} Sep 29 22:39:43 crc kubenswrapper[4922]: I0929 22:39:43.397041 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lff66" podStartSLOduration=2.887913234 podStartE2EDuration="5.39701608s" podCreationTimestamp="2025-09-29 22:39:38 +0000 UTC" firstStartedPulling="2025-09-29 22:39:40.314948968 +0000 UTC m=+784.625237821" lastFinishedPulling="2025-09-29 22:39:42.824051824 +0000 UTC m=+787.134340667" observedRunningTime="2025-09-29 22:39:43.390679244 +0000 UTC m=+787.700968097" watchObservedRunningTime="2025-09-29 22:39:43.39701608 +0000 UTC m=+787.707304923" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.333618 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt"] Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.335238 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.337724 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.349573 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt"] Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.393458 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.393509 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.393546 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg8m7\" (UniqueName: \"kubernetes.io/projected/d286b91a-b67a-4540-b8e7-5296d645fead-kube-api-access-xg8m7\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.495046 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg8m7\" (UniqueName: \"kubernetes.io/projected/d286b91a-b67a-4540-b8e7-5296d645fead-kube-api-access-xg8m7\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.495232 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.495269 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.496002 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.498553 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.542928 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg8m7\" (UniqueName: \"kubernetes.io/projected/d286b91a-b67a-4540-b8e7-5296d645fead-kube-api-access-xg8m7\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:45 crc kubenswrapper[4922]: I0929 22:39:45.660498 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:46 crc kubenswrapper[4922]: I0929 22:39:46.182218 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt"] Sep 29 22:39:46 crc kubenswrapper[4922]: I0929 22:39:46.381834 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" event={"ID":"d286b91a-b67a-4540-b8e7-5296d645fead","Type":"ContainerStarted","Data":"b693d3225079f67cc865b5411f68f32e9284c0a4bbfe6f01a5c8b158f73fe3b4"} Sep 29 22:39:46 crc kubenswrapper[4922]: I0929 22:39:46.382289 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" event={"ID":"d286b91a-b67a-4540-b8e7-5296d645fead","Type":"ContainerStarted","Data":"8114d77c059cd01913c944dee1091f0917333a3197e406b86e5db7796a7473fa"} Sep 29 22:39:47 crc kubenswrapper[4922]: I0929 22:39:47.611793 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:47 crc kubenswrapper[4922]: I0929 22:39:47.611966 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:47 crc kubenswrapper[4922]: I0929 22:39:47.682139 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:48 crc kubenswrapper[4922]: I0929 22:39:48.397333 4922 generic.go:334] "Generic (PLEG): container finished" podID="d286b91a-b67a-4540-b8e7-5296d645fead" containerID="b693d3225079f67cc865b5411f68f32e9284c0a4bbfe6f01a5c8b158f73fe3b4" exitCode=0 Sep 29 22:39:48 crc kubenswrapper[4922]: I0929 22:39:48.397383 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" 
event={"ID":"d286b91a-b67a-4540-b8e7-5296d645fead","Type":"ContainerDied","Data":"b693d3225079f67cc865b5411f68f32e9284c0a4bbfe6f01a5c8b158f73fe3b4"} Sep 29 22:39:48 crc kubenswrapper[4922]: I0929 22:39:48.476791 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:49 crc kubenswrapper[4922]: I0929 22:39:49.003872 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:49 crc kubenswrapper[4922]: I0929 22:39:49.003968 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:49 crc kubenswrapper[4922]: I0929 22:39:49.072519 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:49 crc kubenswrapper[4922]: I0929 22:39:49.476299 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:50 crc kubenswrapper[4922]: I0929 22:39:50.414208 4922 generic.go:334] "Generic (PLEG): container finished" podID="d286b91a-b67a-4540-b8e7-5296d645fead" containerID="b1ca004990e47ebcbbc7366017322ce6c753ec6d01c86e31e402f2a253ccdb41" exitCode=0 Sep 29 22:39:50 crc kubenswrapper[4922]: I0929 22:39:50.414310 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" event={"ID":"d286b91a-b67a-4540-b8e7-5296d645fead","Type":"ContainerDied","Data":"b1ca004990e47ebcbbc7366017322ce6c753ec6d01c86e31e402f2a253ccdb41"} Sep 29 22:39:50 crc kubenswrapper[4922]: I0929 22:39:50.659484 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhkpl"] Sep 29 22:39:51 crc kubenswrapper[4922]: I0929 22:39:51.424473 4922 generic.go:334] "Generic (PLEG): container finished" podID="d286b91a-b67a-4540-b8e7-5296d645fead" containerID="281001c6d97bc0235782d7267fb9e11eff848245f9734e125ac2c7ad0fc4239f" exitCode=0 Sep 29 22:39:51 crc kubenswrapper[4922]: I0929 22:39:51.424580 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" event={"ID":"d286b91a-b67a-4540-b8e7-5296d645fead","Type":"ContainerDied","Data":"281001c6d97bc0235782d7267fb9e11eff848245f9734e125ac2c7ad0fc4239f"} Sep 29 22:39:51 crc kubenswrapper[4922]: I0929 22:39:51.424676 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qhkpl" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="registry-server" containerID="cri-o://e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222" gracePeriod=2 Sep 29 22:39:51 crc kubenswrapper[4922]: I0929 22:39:51.922830 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.011626 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-catalog-content\") pod \"ba5b228e-b34d-4a19-8137-3b449bb4233f\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.011708 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvx7h\" (UniqueName: \"kubernetes.io/projected/ba5b228e-b34d-4a19-8137-3b449bb4233f-kube-api-access-vvx7h\") pod \"ba5b228e-b34d-4a19-8137-3b449bb4233f\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.011765 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-utilities\") pod \"ba5b228e-b34d-4a19-8137-3b449bb4233f\" (UID: \"ba5b228e-b34d-4a19-8137-3b449bb4233f\") " Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.012880 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-utilities" (OuterVolumeSpecName: "utilities") pod "ba5b228e-b34d-4a19-8137-3b449bb4233f" (UID: "ba5b228e-b34d-4a19-8137-3b449bb4233f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.020072 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba5b228e-b34d-4a19-8137-3b449bb4233f-kube-api-access-vvx7h" (OuterVolumeSpecName: "kube-api-access-vvx7h") pod "ba5b228e-b34d-4a19-8137-3b449bb4233f" (UID: "ba5b228e-b34d-4a19-8137-3b449bb4233f"). InnerVolumeSpecName "kube-api-access-vvx7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.042653 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba5b228e-b34d-4a19-8137-3b449bb4233f" (UID: "ba5b228e-b34d-4a19-8137-3b449bb4233f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.113781 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.113834 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvx7h\" (UniqueName: \"kubernetes.io/projected/ba5b228e-b34d-4a19-8137-3b449bb4233f-kube-api-access-vvx7h\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.113856 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba5b228e-b34d-4a19-8137-3b449bb4233f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.435816 4922 generic.go:334] "Generic (PLEG): container finished" podID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerID="e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222" exitCode=0 Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.435898 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhkpl" event={"ID":"ba5b228e-b34d-4a19-8137-3b449bb4233f","Type":"ContainerDied","Data":"e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222"} Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.435944 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhkpl" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.435984 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhkpl" event={"ID":"ba5b228e-b34d-4a19-8137-3b449bb4233f","Type":"ContainerDied","Data":"792adac33abc1f9f6ea390da1496ce79bd2e6393e271a961e8c7e1d51125656e"} Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.436008 4922 scope.go:117] "RemoveContainer" containerID="e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.477882 4922 scope.go:117] "RemoveContainer" containerID="1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.483139 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhkpl"] Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.489061 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhkpl"] Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.502735 4922 scope.go:117] "RemoveContainer" containerID="0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.555108 4922 scope.go:117] "RemoveContainer" containerID="e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222" Sep 29 22:39:52 crc kubenswrapper[4922]: E0929 22:39:52.556607 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222\": container with ID starting with e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222 not found: ID does not exist" containerID="e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.557237 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222"} err="failed to get container status \"e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222\": rpc error: code = NotFound desc = could not find container \"e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222\": container with ID starting with e92e53ab13b01bb451791c8ffe73f789b1ef65ac557ca6386e99550573923222 not found: ID does not exist" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.557426 4922 scope.go:117] "RemoveContainer" containerID="1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10" Sep 29 22:39:52 crc kubenswrapper[4922]: E0929 22:39:52.558229 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10\": container with ID starting with 1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10 not found: ID does not exist" containerID="1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.558286 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10"} err="failed to get container status \"1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10\": rpc error: code = NotFound desc = could not find container \"1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10\": container with ID starting with 1a12aef382a396f958606332076a0fb5a2e3375ee4e16e8b7ad1f5fbb19cfd10 not found: ID does not exist" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.558321 4922 scope.go:117] "RemoveContainer" containerID="0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7" Sep 29 22:39:52 crc kubenswrapper[4922]: E0929 22:39:52.559042 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7\": container with ID starting with 0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7 not found: ID does not exist" containerID="0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.559091 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7"} err="failed to get container status \"0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7\": rpc error: code = NotFound desc = could not find container \"0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7\": container with ID starting with 0b18d12c9170ae2d714b9a8e25bde235dd5692569920e8a1452a107396f383a7 not found: ID does not exist" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.659736 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lff66"] Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.660083 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lff66" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="registry-server" containerID="cri-o://ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee" gracePeriod=2 Sep 29 22:39:52 
crc kubenswrapper[4922]: I0929 22:39:52.824496 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.929348 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-bundle\") pod \"d286b91a-b67a-4540-b8e7-5296d645fead\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.929483 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-util\") pod \"d286b91a-b67a-4540-b8e7-5296d645fead\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.929789 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg8m7\" (UniqueName: \"kubernetes.io/projected/d286b91a-b67a-4540-b8e7-5296d645fead-kube-api-access-xg8m7\") pod \"d286b91a-b67a-4540-b8e7-5296d645fead\" (UID: \"d286b91a-b67a-4540-b8e7-5296d645fead\") " Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.932127 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-bundle" (OuterVolumeSpecName: "bundle") pod "d286b91a-b67a-4540-b8e7-5296d645fead" (UID: "d286b91a-b67a-4540-b8e7-5296d645fead"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.943529 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d286b91a-b67a-4540-b8e7-5296d645fead-kube-api-access-xg8m7" (OuterVolumeSpecName: "kube-api-access-xg8m7") pod "d286b91a-b67a-4540-b8e7-5296d645fead" (UID: "d286b91a-b67a-4540-b8e7-5296d645fead"). InnerVolumeSpecName "kube-api-access-xg8m7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:52 crc kubenswrapper[4922]: I0929 22:39:52.959792 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-util" (OuterVolumeSpecName: "util") pod "d286b91a-b67a-4540-b8e7-5296d645fead" (UID: "d286b91a-b67a-4540-b8e7-5296d645fead"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.031673 4922 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-util\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.031695 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg8m7\" (UniqueName: \"kubernetes.io/projected/d286b91a-b67a-4540-b8e7-5296d645fead-kube-api-access-xg8m7\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.031705 4922 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d286b91a-b67a-4540-b8e7-5296d645fead-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.152414 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.234338 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-utilities\") pod \"b0fde429-939b-44db-a3dd-89618352d84d\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.234486 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqrxc\" (UniqueName: \"kubernetes.io/projected/b0fde429-939b-44db-a3dd-89618352d84d-kube-api-access-mqrxc\") pod \"b0fde429-939b-44db-a3dd-89618352d84d\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.234562 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-catalog-content\") pod \"b0fde429-939b-44db-a3dd-89618352d84d\" (UID: \"b0fde429-939b-44db-a3dd-89618352d84d\") " Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.236136 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-utilities" (OuterVolumeSpecName: "utilities") pod "b0fde429-939b-44db-a3dd-89618352d84d" (UID: "b0fde429-939b-44db-a3dd-89618352d84d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.238368 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0fde429-939b-44db-a3dd-89618352d84d-kube-api-access-mqrxc" (OuterVolumeSpecName: "kube-api-access-mqrxc") pod "b0fde429-939b-44db-a3dd-89618352d84d" (UID: "b0fde429-939b-44db-a3dd-89618352d84d"). InnerVolumeSpecName "kube-api-access-mqrxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.336006 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqrxc\" (UniqueName: \"kubernetes.io/projected/b0fde429-939b-44db-a3dd-89618352d84d-kube-api-access-mqrxc\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.336064 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.363441 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0fde429-939b-44db-a3dd-89618352d84d" (UID: "b0fde429-939b-44db-a3dd-89618352d84d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.436877 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0fde429-939b-44db-a3dd-89618352d84d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.446705 4922 generic.go:334] "Generic (PLEG): container finished" podID="b0fde429-939b-44db-a3dd-89618352d84d" containerID="ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee" exitCode=0 Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.446766 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lff66" event={"ID":"b0fde429-939b-44db-a3dd-89618352d84d","Type":"ContainerDied","Data":"ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee"} Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.446793 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lff66" event={"ID":"b0fde429-939b-44db-a3dd-89618352d84d","Type":"ContainerDied","Data":"89e7de1719120bd7ed217a9a246cd0dafdc1b32f26a3a1613174a90dfe4e0d91"} Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.446809 4922 scope.go:117] "RemoveContainer" containerID="ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.446840 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lff66" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.451431 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" event={"ID":"d286b91a-b67a-4540-b8e7-5296d645fead","Type":"ContainerDied","Data":"8114d77c059cd01913c944dee1091f0917333a3197e406b86e5db7796a7473fa"} Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.451494 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8114d77c059cd01913c944dee1091f0917333a3197e406b86e5db7796a7473fa" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.451443 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.468380 4922 scope.go:117] "RemoveContainer" containerID="7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.497416 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lff66"] Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.503787 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lff66"] Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.504646 4922 scope.go:117] "RemoveContainer" containerID="05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.524095 4922 scope.go:117] "RemoveContainer" containerID="ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee" Sep 29 22:39:53 crc kubenswrapper[4922]: E0929 22:39:53.524489 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee\": container with ID starting with ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee not found: ID does not exist" containerID="ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.524530 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee"} err="failed to get container status \"ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee\": rpc error: code = NotFound desc = could not find container \"ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee\": container with ID starting with ed67a23fa3f4acacc3ca7f36c12704ff9b8191e28719114bcc2d8a1e3af945ee not found: ID does not exist" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.524557 4922 scope.go:117] "RemoveContainer" containerID="7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d" Sep 29 22:39:53 crc kubenswrapper[4922]: E0929 22:39:53.524898 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d\": container with ID starting with 7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d not found: ID does not exist" containerID="7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.524920 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d"} err="failed to get container status \"7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d\": rpc error: code = NotFound desc = could not find container \"7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d\": container with ID starting with 7cb26bb4c7512bdb8f7484f2962ddaab8ba8b637d937fa442f4b11929d61888d not found: ID does not exist" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.524932 4922 scope.go:117] "RemoveContainer" containerID="05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076" Sep 29 22:39:53 crc kubenswrapper[4922]: E0929 22:39:53.525267 4922 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076\": container with ID starting with 05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076 not found: ID does not exist" containerID="05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076" Sep 29 22:39:53 crc kubenswrapper[4922]: I0929 22:39:53.525335 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076"} err="failed to get container status \"05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076\": rpc error: code = NotFound desc = could not find container \"05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076\": container with ID starting with 05c763225f77910ac9ee5f74cad0e6494cfa8a4245677e89ea318ecb7bc7f076 not found: ID does not exist" Sep 29 22:39:54 crc kubenswrapper[4922]: I0929 22:39:54.433657 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0fde429-939b-44db-a3dd-89618352d84d" path="/var/lib/kubelet/pods/b0fde429-939b-44db-a3dd-89618352d84d/volumes" Sep 29 22:39:54 crc kubenswrapper[4922]: I0929 22:39:54.434947 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" path="/var/lib/kubelet/pods/ba5b228e-b34d-4a19-8137-3b449bb4233f/volumes" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.159446 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42"] Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.160679 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d286b91a-b67a-4540-b8e7-5296d645fead" containerName="pull" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.160755 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d286b91a-b67a-4540-b8e7-5296d645fead" containerName="pull" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.160825 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="extract-utilities" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.160847 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="extract-utilities" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.160874 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="extract-content" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.160924 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="extract-content" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.160945 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="registry-server" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.160956 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="registry-server" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.160972 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d286b91a-b67a-4540-b8e7-5296d645fead" containerName="util" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161007 4922 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d286b91a-b67a-4540-b8e7-5296d645fead" containerName="util" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.161028 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="registry-server" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161038 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="registry-server" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.161049 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="extract-content" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161061 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="extract-content" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.161100 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d286b91a-b67a-4540-b8e7-5296d645fead" containerName="extract" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161111 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d286b91a-b67a-4540-b8e7-5296d645fead" containerName="extract" Sep 29 22:39:56 crc kubenswrapper[4922]: E0929 22:39:56.161132 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="extract-utilities" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161143 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="extract-utilities" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161349 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0fde429-939b-44db-a3dd-89618352d84d" containerName="registry-server" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161370 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba5b228e-b34d-4a19-8137-3b449bb4233f" containerName="registry-server" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.161387 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d286b91a-b67a-4540-b8e7-5296d645fead" containerName="extract" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.162208 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.166150 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.166176 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.166505 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-ltrsq" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.167678 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42"] Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.278232 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsmrn\" (UniqueName: \"kubernetes.io/projected/d17ba7e9-17b9-45e4-86fe-d7efcf6732be-kube-api-access-nsmrn\") pod \"nmstate-operator-5d6f6cfd66-m7r42\" (UID: \"d17ba7e9-17b9-45e4-86fe-d7efcf6732be\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.379231 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsmrn\" (UniqueName: \"kubernetes.io/projected/d17ba7e9-17b9-45e4-86fe-d7efcf6732be-kube-api-access-nsmrn\") pod \"nmstate-operator-5d6f6cfd66-m7r42\" (UID: \"d17ba7e9-17b9-45e4-86fe-d7efcf6732be\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.412567 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsmrn\" (UniqueName: \"kubernetes.io/projected/d17ba7e9-17b9-45e4-86fe-d7efcf6732be-kube-api-access-nsmrn\") pod \"nmstate-operator-5d6f6cfd66-m7r42\" (UID: \"d17ba7e9-17b9-45e4-86fe-d7efcf6732be\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.482631 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" Sep 29 22:39:56 crc kubenswrapper[4922]: I0929 22:39:56.937644 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42"] Sep 29 22:39:57 crc kubenswrapper[4922]: I0929 22:39:57.491070 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" event={"ID":"d17ba7e9-17b9-45e4-86fe-d7efcf6732be","Type":"ContainerStarted","Data":"adab43b7fea64961ec478155c9e299b1511549b91d03e00d7fd2e8ceafc260b9"} Sep 29 22:39:58 crc kubenswrapper[4922]: I0929 22:39:58.913234 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:39:58 crc kubenswrapper[4922]: I0929 22:39:58.913589 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:39:59 crc kubenswrapper[4922]: I0929 22:39:59.507825 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" event={"ID":"d17ba7e9-17b9-45e4-86fe-d7efcf6732be","Type":"ContainerStarted","Data":"4cdd792d96639f3ac236cf1fd6b730f576b8177fc97d2fcd289c9ba7f0c1b046"} Sep 29 22:39:59 crc kubenswrapper[4922]: I0929 22:39:59.535078 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-m7r42" podStartSLOduration=1.146294944 podStartE2EDuration="3.535054387s" podCreationTimestamp="2025-09-29 22:39:56 +0000 UTC" firstStartedPulling="2025-09-29 22:39:56.946983601 +0000 UTC m=+801.257272444" lastFinishedPulling="2025-09-29 22:39:59.335743064 +0000 UTC m=+803.646031887" observedRunningTime="2025-09-29 22:39:59.53151017 +0000 UTC m=+803.841799003" watchObservedRunningTime="2025-09-29 22:39:59.535054387 +0000 UTC m=+803.845343230" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.279374 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.280343 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.287550 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-vmg6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.289953 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-xk99n"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.299601 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.305842 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.306182 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.324054 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-scshm"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.326286 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.350465 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-xk99n"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.408930 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-ovs-socket\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.408972 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n88t8\" (UniqueName: \"kubernetes.io/projected/0e49bb85-66d4-471e-96b0-ae49830ad4e2-kube-api-access-n88t8\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.409008 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq5hq\" (UniqueName: \"kubernetes.io/projected/3cc45e35-ac73-47d9-809e-408dbd5f0077-kube-api-access-fq5hq\") pod \"nmstate-webhook-6d689559c5-xk99n\" (UID: \"3cc45e35-ac73-47d9-809e-408dbd5f0077\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.409037 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb8gl\" (UniqueName: \"kubernetes.io/projected/68088178-12b9-4f0e-afa1-684bd52caf29-kube-api-access-mb8gl\") pod \"nmstate-metrics-58fcddf996-vsmb9\" (UID: \"68088178-12b9-4f0e-afa1-684bd52caf29\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.409053 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-nmstate-lock\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.409072 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3cc45e35-ac73-47d9-809e-408dbd5f0077-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xk99n\" (UID: \"3cc45e35-ac73-47d9-809e-408dbd5f0077\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.409091 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-dbus-socket\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.469957 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.470571 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.475526 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.475617 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.475912 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-wqpz8" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.478828 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.511980 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3cc45e35-ac73-47d9-809e-408dbd5f0077-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xk99n\" (UID: \"3cc45e35-ac73-47d9-809e-408dbd5f0077\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512017 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-dbus-socket\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512079 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-ovs-socket\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512097 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n88t8\" (UniqueName: \"kubernetes.io/projected/0e49bb85-66d4-471e-96b0-ae49830ad4e2-kube-api-access-n88t8\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: E0929 22:40:05.512122 4922 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Sep 29 22:40:05 crc kubenswrapper[4922]: E0929 22:40:05.512196 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3cc45e35-ac73-47d9-809e-408dbd5f0077-tls-key-pair podName:3cc45e35-ac73-47d9-809e-408dbd5f0077 nodeName:}" failed. No retries permitted until 2025-09-29 22:40:06.01217641 +0000 UTC m=+810.322465223 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/3cc45e35-ac73-47d9-809e-408dbd5f0077-tls-key-pair") pod "nmstate-webhook-6d689559c5-xk99n" (UID: "3cc45e35-ac73-47d9-809e-408dbd5f0077") : secret "openshift-nmstate-webhook" not found Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512137 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq5hq\" (UniqueName: \"kubernetes.io/projected/3cc45e35-ac73-47d9-809e-408dbd5f0077-kube-api-access-fq5hq\") pod \"nmstate-webhook-6d689559c5-xk99n\" (UID: \"3cc45e35-ac73-47d9-809e-408dbd5f0077\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512435 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-ovs-socket\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512444 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb8gl\" (UniqueName: \"kubernetes.io/projected/68088178-12b9-4f0e-afa1-684bd52caf29-kube-api-access-mb8gl\") pod \"nmstate-metrics-58fcddf996-vsmb9\" (UID: \"68088178-12b9-4f0e-afa1-684bd52caf29\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512477 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-nmstate-lock\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512533 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-dbus-socket\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.512558 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/0e49bb85-66d4-471e-96b0-ae49830ad4e2-nmstate-lock\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.530302 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n88t8\" (UniqueName: \"kubernetes.io/projected/0e49bb85-66d4-471e-96b0-ae49830ad4e2-kube-api-access-n88t8\") pod \"nmstate-handler-scshm\" (UID: \"0e49bb85-66d4-471e-96b0-ae49830ad4e2\") " pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.530408 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq5hq\" (UniqueName: \"kubernetes.io/projected/3cc45e35-ac73-47d9-809e-408dbd5f0077-kube-api-access-fq5hq\") pod \"nmstate-webhook-6d689559c5-xk99n\" (UID: \"3cc45e35-ac73-47d9-809e-408dbd5f0077\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.530433 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mb8gl\" (UniqueName: \"kubernetes.io/projected/68088178-12b9-4f0e-afa1-684bd52caf29-kube-api-access-mb8gl\") pod \"nmstate-metrics-58fcddf996-vsmb9\" (UID: \"68088178-12b9-4f0e-afa1-684bd52caf29\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.613402 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntsgz\" (UniqueName: \"kubernetes.io/projected/819d93a0-d662-4188-811f-10078673fa3f-kube-api-access-ntsgz\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.613486 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/819d93a0-d662-4188-811f-10078673fa3f-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.613517 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/819d93a0-d662-4188-811f-10078673fa3f-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.622456 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.656334 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7fdf4758cd-dpr6z"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.657093 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.668084 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7fdf4758cd-dpr6z"] Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.680441 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.714479 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/819d93a0-d662-4188-811f-10078673fa3f-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.714725 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntsgz\" (UniqueName: \"kubernetes.io/projected/819d93a0-d662-4188-811f-10078673fa3f-kube-api-access-ntsgz\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.714767 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/819d93a0-d662-4188-811f-10078673fa3f-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: E0929 22:40:05.714872 4922 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Sep 29 22:40:05 crc kubenswrapper[4922]: E0929 22:40:05.714921 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/819d93a0-d662-4188-811f-10078673fa3f-plugin-serving-cert podName:819d93a0-d662-4188-811f-10078673fa3f nodeName:}" failed. No retries permitted until 2025-09-29 22:40:06.214905667 +0000 UTC m=+810.525194480 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/819d93a0-d662-4188-811f-10078673fa3f-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-kfbdv" (UID: "819d93a0-d662-4188-811f-10078673fa3f") : secret "plugin-serving-cert" not found Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.716522 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/819d93a0-d662-4188-811f-10078673fa3f-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: W0929 22:40:05.726521 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e49bb85_66d4_471e_96b0_ae49830ad4e2.slice/crio-fab5026916ae7968b28c786bbd3a92ac7ee91fd5033e17f3a22f9a6f5bbe76d0 WatchSource:0}: Error finding container fab5026916ae7968b28c786bbd3a92ac7ee91fd5033e17f3a22f9a6f5bbe76d0: Status 404 returned error can't find the container with id fab5026916ae7968b28c786bbd3a92ac7ee91fd5033e17f3a22f9a6f5bbe76d0 Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.730655 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntsgz\" (UniqueName: \"kubernetes.io/projected/819d93a0-d662-4188-811f-10078673fa3f-kube-api-access-ntsgz\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.816755 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrwps\" (UniqueName: \"kubernetes.io/projected/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-kube-api-access-qrwps\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.817183 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-serving-cert\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.817214 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-oauth-serving-cert\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.817248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-oauth-config\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.817274 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-service-ca\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.817313 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-config\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.817357 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-trusted-ca-bundle\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.918408 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-trusted-ca-bundle\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.918548 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrwps\" (UniqueName: \"kubernetes.io/projected/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-kube-api-access-qrwps\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.919159 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-serving-cert\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.920280 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-oauth-serving-cert\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.920352 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-oauth-config\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.920377 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-service-ca\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.920445 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"console-config\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-config\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.920781 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-trusted-ca-bundle\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.921544 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-oauth-serving-cert\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.921659 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-config\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.922782 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-service-ca\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.939876 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-oauth-config\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.940063 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-console-serving-cert\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.942542 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrwps\" (UniqueName: \"kubernetes.io/projected/dc7610d6-07de-4ca2-a62e-74f620c4e4a2-kube-api-access-qrwps\") pod \"console-7fdf4758cd-dpr6z\" (UID: \"dc7610d6-07de-4ca2-a62e-74f620c4e4a2\") " pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:05 crc kubenswrapper[4922]: I0929 22:40:05.984385 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.021771 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3cc45e35-ac73-47d9-809e-408dbd5f0077-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xk99n\" (UID: \"3cc45e35-ac73-47d9-809e-408dbd5f0077\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.027180 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3cc45e35-ac73-47d9-809e-408dbd5f0077-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-xk99n\" (UID: \"3cc45e35-ac73-47d9-809e-408dbd5f0077\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.086859 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9"] Sep 29 22:40:06 crc kubenswrapper[4922]: W0929 22:40:06.107634 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68088178_12b9_4f0e_afa1_684bd52caf29.slice/crio-242323f8c7d4642084d0333593040659be0d4291e03a6d780e64b04f832106f4 WatchSource:0}: Error finding container 242323f8c7d4642084d0333593040659be0d4291e03a6d780e64b04f832106f4: Status 404 returned error can't find the container with id 242323f8c7d4642084d0333593040659be0d4291e03a6d780e64b04f832106f4 Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.225476 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/819d93a0-d662-4188-811f-10078673fa3f-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.230761 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/819d93a0-d662-4188-811f-10078673fa3f-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-kfbdv\" (UID: \"819d93a0-d662-4188-811f-10078673fa3f\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.271565 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.384336 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.473010 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7fdf4758cd-dpr6z"] Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.557626 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7fdf4758cd-dpr6z" event={"ID":"dc7610d6-07de-4ca2-a62e-74f620c4e4a2","Type":"ContainerStarted","Data":"7e5c85361a612c1cbcf3e2930c8a474d41c4ebe9c54d63efaba992cae2094cf0"} Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.570705 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" event={"ID":"68088178-12b9-4f0e-afa1-684bd52caf29","Type":"ContainerStarted","Data":"242323f8c7d4642084d0333593040659be0d4291e03a6d780e64b04f832106f4"} Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.574795 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-xk99n"] Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.580628 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-scshm" event={"ID":"0e49bb85-66d4-471e-96b0-ae49830ad4e2","Type":"ContainerStarted","Data":"fab5026916ae7968b28c786bbd3a92ac7ee91fd5033e17f3a22f9a6f5bbe76d0"} Sep 29 22:40:06 crc kubenswrapper[4922]: I0929 22:40:06.873425 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv"] Sep 29 22:40:06 crc kubenswrapper[4922]: W0929 22:40:06.874238 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod819d93a0_d662_4188_811f_10078673fa3f.slice/crio-48bb16248de0ffac72836d0913dd03edc6dbaec895d1556bdba75d5571a56488 WatchSource:0}: Error finding container 48bb16248de0ffac72836d0913dd03edc6dbaec895d1556bdba75d5571a56488: Status 404 returned error can't find the container with id 48bb16248de0ffac72836d0913dd03edc6dbaec895d1556bdba75d5571a56488 Sep 29 22:40:07 crc kubenswrapper[4922]: I0929 22:40:07.587181 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" event={"ID":"819d93a0-d662-4188-811f-10078673fa3f","Type":"ContainerStarted","Data":"48bb16248de0ffac72836d0913dd03edc6dbaec895d1556bdba75d5571a56488"} Sep 29 22:40:07 crc kubenswrapper[4922]: I0929 22:40:07.589497 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7fdf4758cd-dpr6z" event={"ID":"dc7610d6-07de-4ca2-a62e-74f620c4e4a2","Type":"ContainerStarted","Data":"3aaeaec06670a5dbe4df404a67037b8297be7d2f761fd86586942d175dde7c5b"} Sep 29 22:40:07 crc kubenswrapper[4922]: I0929 22:40:07.590744 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" event={"ID":"3cc45e35-ac73-47d9-809e-408dbd5f0077","Type":"ContainerStarted","Data":"2575c0c51ffdc1dcc84f3c37d0dd00d0da45e4b1c8cc8cf2590568e105f45754"} Sep 29 22:40:07 crc kubenswrapper[4922]: I0929 22:40:07.618670 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7fdf4758cd-dpr6z" podStartSLOduration=2.618650802 podStartE2EDuration="2.618650802s" podCreationTimestamp="2025-09-29 22:40:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 
22:40:07.614549621 +0000 UTC m=+811.924838484" watchObservedRunningTime="2025-09-29 22:40:07.618650802 +0000 UTC m=+811.928939615" Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.607200 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" event={"ID":"819d93a0-d662-4188-811f-10078673fa3f","Type":"ContainerStarted","Data":"520d7bbac8fe7c0716d55993de372a0c687931b3f43481799226efb1410d94d8"} Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.610033 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" event={"ID":"68088178-12b9-4f0e-afa1-684bd52caf29","Type":"ContainerStarted","Data":"16d804d988ccdba79e6027e887db2dc62d135e5bc9e59f09e8b8ee501c9995d6"} Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.610813 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-scshm" event={"ID":"0e49bb85-66d4-471e-96b0-ae49830ad4e2","Type":"ContainerStarted","Data":"2bd62de396ee8fc6238eb004d778b943104c769b7efb4f5c9162881f322c3512"} Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.610929 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.612888 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" event={"ID":"3cc45e35-ac73-47d9-809e-408dbd5f0077","Type":"ContainerStarted","Data":"6fb8aefc3e6a40b68aeff660b4b52f72aa9c2ec8f1055fe75bba1541f0c820c8"} Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.613015 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.633358 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-kfbdv" podStartSLOduration=2.17861985 podStartE2EDuration="4.633320591s" podCreationTimestamp="2025-09-29 22:40:05 +0000 UTC" firstStartedPulling="2025-09-29 22:40:06.87636342 +0000 UTC m=+811.186652243" lastFinishedPulling="2025-09-29 22:40:09.331064161 +0000 UTC m=+813.641352984" observedRunningTime="2025-09-29 22:40:09.627524749 +0000 UTC m=+813.937813552" watchObservedRunningTime="2025-09-29 22:40:09.633320591 +0000 UTC m=+813.943609454" Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.654082 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" podStartSLOduration=2.850465954 podStartE2EDuration="4.65405577s" podCreationTimestamp="2025-09-29 22:40:05 +0000 UTC" firstStartedPulling="2025-09-29 22:40:06.585663464 +0000 UTC m=+810.895952287" lastFinishedPulling="2025-09-29 22:40:08.38925326 +0000 UTC m=+812.699542103" observedRunningTime="2025-09-29 22:40:09.650019751 +0000 UTC m=+813.960308564" watchObservedRunningTime="2025-09-29 22:40:09.65405577 +0000 UTC m=+813.964344613" Sep 29 22:40:09 crc kubenswrapper[4922]: I0929 22:40:09.673723 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-scshm" podStartSLOduration=2.017494944 podStartE2EDuration="4.673663291s" podCreationTimestamp="2025-09-29 22:40:05 +0000 UTC" firstStartedPulling="2025-09-29 22:40:05.729351481 +0000 UTC m=+810.039640284" lastFinishedPulling="2025-09-29 22:40:08.385519778 +0000 UTC m=+812.695808631" 
observedRunningTime="2025-09-29 22:40:09.667972412 +0000 UTC m=+813.978261295" watchObservedRunningTime="2025-09-29 22:40:09.673663291 +0000 UTC m=+813.983952134" Sep 29 22:40:11 crc kubenswrapper[4922]: I0929 22:40:11.632232 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" event={"ID":"68088178-12b9-4f0e-afa1-684bd52caf29","Type":"ContainerStarted","Data":"39ad892da1b0dfba4ec4c336d216687c623b7314bb13ccd7e2cad2a6ec305c4c"} Sep 29 22:40:11 crc kubenswrapper[4922]: I0929 22:40:11.655622 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-vsmb9" podStartSLOduration=1.8278176080000001 podStartE2EDuration="6.655605366s" podCreationTimestamp="2025-09-29 22:40:05 +0000 UTC" firstStartedPulling="2025-09-29 22:40:06.11017184 +0000 UTC m=+810.420460713" lastFinishedPulling="2025-09-29 22:40:10.937959628 +0000 UTC m=+815.248248471" observedRunningTime="2025-09-29 22:40:11.651914246 +0000 UTC m=+815.962203069" watchObservedRunningTime="2025-09-29 22:40:11.655605366 +0000 UTC m=+815.965894179" Sep 29 22:40:15 crc kubenswrapper[4922]: I0929 22:40:15.715641 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-scshm" Sep 29 22:40:15 crc kubenswrapper[4922]: I0929 22:40:15.984738 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:15 crc kubenswrapper[4922]: I0929 22:40:15.984797 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:15 crc kubenswrapper[4922]: I0929 22:40:15.993360 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:16 crc kubenswrapper[4922]: I0929 22:40:16.670665 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7fdf4758cd-dpr6z" Sep 29 22:40:16 crc kubenswrapper[4922]: I0929 22:40:16.752675 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-fq7mw"] Sep 29 22:40:26 crc kubenswrapper[4922]: I0929 22:40:26.282638 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-xk99n" Sep 29 22:40:28 crc kubenswrapper[4922]: I0929 22:40:28.913367 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:40:28 crc kubenswrapper[4922]: I0929 22:40:28.913774 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:40:28 crc kubenswrapper[4922]: I0929 22:40:28.913835 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:40:28 crc kubenswrapper[4922]: I0929 22:40:28.914542 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"2fff5dec163c43924ec181a6c7d9ee934e027ea79ccf259ff2b5530d85b03707"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:40:28 crc kubenswrapper[4922]: I0929 22:40:28.914637 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://2fff5dec163c43924ec181a6c7d9ee934e027ea79ccf259ff2b5530d85b03707" gracePeriod=600 Sep 29 22:40:29 crc kubenswrapper[4922]: I0929 22:40:29.767036 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="2fff5dec163c43924ec181a6c7d9ee934e027ea79ccf259ff2b5530d85b03707" exitCode=0 Sep 29 22:40:29 crc kubenswrapper[4922]: I0929 22:40:29.767152 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"2fff5dec163c43924ec181a6c7d9ee934e027ea79ccf259ff2b5530d85b03707"} Sep 29 22:40:29 crc kubenswrapper[4922]: I0929 22:40:29.767783 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"92b5767336a72e147921a9d2961a6367ee20762375b4581c376088ef25b4feea"} Sep 29 22:40:29 crc kubenswrapper[4922]: I0929 22:40:29.767814 4922 scope.go:117] "RemoveContainer" containerID="be2fe9125c1ce8caf5b45073baadd0f21588a94e8d9279d703866cc234e4eaaf" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.772519 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7"] Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.774697 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.777322 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.810492 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-fq7mw" podUID="ddedd179-84f4-4532-9d1b-eed45990a6e2" containerName="console" containerID="cri-o://157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1" gracePeriod=15 Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.824793 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7"] Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.877575 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.877964 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.878231 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlfsw\" (UniqueName: \"kubernetes.io/projected/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-kube-api-access-nlfsw\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.980630 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.980755 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlfsw\" (UniqueName: \"kubernetes.io/projected/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-kube-api-access-nlfsw\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.980803 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-bundle\") pod 
\"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.981476 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:41 crc kubenswrapper[4922]: I0929 22:40:41.981473 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.021297 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlfsw\" (UniqueName: \"kubernetes.io/projected/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-kube-api-access-nlfsw\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.111605 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.265270 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-fq7mw_ddedd179-84f4-4532-9d1b-eed45990a6e2/console/0.log" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.265580 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.386024 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-serving-cert\") pod \"ddedd179-84f4-4532-9d1b-eed45990a6e2\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.386100 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-oauth-serving-cert\") pod \"ddedd179-84f4-4532-9d1b-eed45990a6e2\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.386133 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-config\") pod \"ddedd179-84f4-4532-9d1b-eed45990a6e2\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.386154 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-trusted-ca-bundle\") pod \"ddedd179-84f4-4532-9d1b-eed45990a6e2\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.386186 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbpnp\" (UniqueName: \"kubernetes.io/projected/ddedd179-84f4-4532-9d1b-eed45990a6e2-kube-api-access-dbpnp\") pod \"ddedd179-84f4-4532-9d1b-eed45990a6e2\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.386224 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-oauth-config\") pod \"ddedd179-84f4-4532-9d1b-eed45990a6e2\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.386297 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-service-ca\") pod \"ddedd179-84f4-4532-9d1b-eed45990a6e2\" (UID: \"ddedd179-84f4-4532-9d1b-eed45990a6e2\") " Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.387453 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-service-ca" (OuterVolumeSpecName: "service-ca") pod "ddedd179-84f4-4532-9d1b-eed45990a6e2" (UID: "ddedd179-84f4-4532-9d1b-eed45990a6e2"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.387491 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-config" (OuterVolumeSpecName: "console-config") pod "ddedd179-84f4-4532-9d1b-eed45990a6e2" (UID: "ddedd179-84f4-4532-9d1b-eed45990a6e2"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.387629 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ddedd179-84f4-4532-9d1b-eed45990a6e2" (UID: "ddedd179-84f4-4532-9d1b-eed45990a6e2"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.387649 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ddedd179-84f4-4532-9d1b-eed45990a6e2" (UID: "ddedd179-84f4-4532-9d1b-eed45990a6e2"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.391572 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ddedd179-84f4-4532-9d1b-eed45990a6e2" (UID: "ddedd179-84f4-4532-9d1b-eed45990a6e2"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.392151 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ddedd179-84f4-4532-9d1b-eed45990a6e2" (UID: "ddedd179-84f4-4532-9d1b-eed45990a6e2"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.392464 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddedd179-84f4-4532-9d1b-eed45990a6e2-kube-api-access-dbpnp" (OuterVolumeSpecName: "kube-api-access-dbpnp") pod "ddedd179-84f4-4532-9d1b-eed45990a6e2" (UID: "ddedd179-84f4-4532-9d1b-eed45990a6e2"). InnerVolumeSpecName "kube-api-access-dbpnp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.487813 4922 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.487941 4922 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.488023 4922 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.488055 4922 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.488080 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbpnp\" (UniqueName: \"kubernetes.io/projected/ddedd179-84f4-4532-9d1b-eed45990a6e2-kube-api-access-dbpnp\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.488107 4922 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ddedd179-84f4-4532-9d1b-eed45990a6e2-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.488131 4922 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ddedd179-84f4-4532-9d1b-eed45990a6e2-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.590650 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7"] Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.868382 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-fq7mw_ddedd179-84f4-4532-9d1b-eed45990a6e2/console/0.log" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.868676 4922 generic.go:334] "Generic (PLEG): container finished" podID="ddedd179-84f4-4532-9d1b-eed45990a6e2" containerID="157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1" exitCode=2 Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.868733 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fq7mw" event={"ID":"ddedd179-84f4-4532-9d1b-eed45990a6e2","Type":"ContainerDied","Data":"157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1"} Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.868763 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-fq7mw" event={"ID":"ddedd179-84f4-4532-9d1b-eed45990a6e2","Type":"ContainerDied","Data":"75c00409f353e8c0fcbc48768a638440ae85c8db15fddf1f137a0b11d5423d4a"} Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.868782 4922 scope.go:117] "RemoveContainer" containerID="157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.868882 4922 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-fq7mw" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.872316 4922 generic.go:334] "Generic (PLEG): container finished" podID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerID="78b5baf928333d1e75746bfc93f44744e56bf19814c324fc76b15e580c1b06e3" exitCode=0 Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.872538 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" event={"ID":"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92","Type":"ContainerDied","Data":"78b5baf928333d1e75746bfc93f44744e56bf19814c324fc76b15e580c1b06e3"} Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.872646 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" event={"ID":"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92","Type":"ContainerStarted","Data":"ed3fca77b230b26610b80c5377ed8d03620652a192aea273068d2ef37ac1b29a"} Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.938030 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-fq7mw"] Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.944011 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-fq7mw"] Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.945152 4922 scope.go:117] "RemoveContainer" containerID="157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1" Sep 29 22:40:42 crc kubenswrapper[4922]: E0929 22:40:42.945868 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1\": container with ID starting with 157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1 not found: ID does not exist" containerID="157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1" Sep 29 22:40:42 crc kubenswrapper[4922]: I0929 22:40:42.945931 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1"} err="failed to get container status \"157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1\": rpc error: code = NotFound desc = could not find container \"157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1\": container with ID starting with 157e083b61032a90f02206f5ac3ff2f2341d4d6c4a2b1002fea4766e37c541a1 not found: ID does not exist" Sep 29 22:40:44 crc kubenswrapper[4922]: I0929 22:40:44.437644 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddedd179-84f4-4532-9d1b-eed45990a6e2" path="/var/lib/kubelet/pods/ddedd179-84f4-4532-9d1b-eed45990a6e2/volumes" Sep 29 22:40:44 crc kubenswrapper[4922]: I0929 22:40:44.890880 4922 generic.go:334] "Generic (PLEG): container finished" podID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerID="8b2e2e343897a458b125dc09bfb17bf457544b43d3ec4834bb2214318c4900af" exitCode=0 Sep 29 22:40:44 crc kubenswrapper[4922]: I0929 22:40:44.890986 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" event={"ID":"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92","Type":"ContainerDied","Data":"8b2e2e343897a458b125dc09bfb17bf457544b43d3ec4834bb2214318c4900af"} Sep 29 22:40:45 crc 
kubenswrapper[4922]: I0929 22:40:45.904036 4922 generic.go:334] "Generic (PLEG): container finished" podID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerID="8317b7a5a3fe69b0e929a32ea773aaaab7f632a1740833425585530b8b96e13c" exitCode=0 Sep 29 22:40:45 crc kubenswrapper[4922]: I0929 22:40:45.904103 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" event={"ID":"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92","Type":"ContainerDied","Data":"8317b7a5a3fe69b0e929a32ea773aaaab7f632a1740833425585530b8b96e13c"} Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.251948 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.263456 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-util\") pod \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.263662 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-bundle\") pod \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.263694 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlfsw\" (UniqueName: \"kubernetes.io/projected/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-kube-api-access-nlfsw\") pod \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\" (UID: \"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92\") " Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.264983 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-bundle" (OuterVolumeSpecName: "bundle") pod "7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" (UID: "7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.265319 4922 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.269765 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-kube-api-access-nlfsw" (OuterVolumeSpecName: "kube-api-access-nlfsw") pod "7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" (UID: "7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92"). InnerVolumeSpecName "kube-api-access-nlfsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.308939 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-util" (OuterVolumeSpecName: "util") pod "7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" (UID: "7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.366894 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlfsw\" (UniqueName: \"kubernetes.io/projected/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-kube-api-access-nlfsw\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.366981 4922 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92-util\") on node \"crc\" DevicePath \"\"" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.927154 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" event={"ID":"7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92","Type":"ContainerDied","Data":"ed3fca77b230b26610b80c5377ed8d03620652a192aea273068d2ef37ac1b29a"} Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.927215 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7" Sep 29 22:40:47 crc kubenswrapper[4922]: I0929 22:40:47.927224 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed3fca77b230b26610b80c5377ed8d03620652a192aea273068d2ef37ac1b29a" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.789316 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-c87f67444-888l5"] Sep 29 22:40:56 crc kubenswrapper[4922]: E0929 22:40:56.790182 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddedd179-84f4-4532-9d1b-eed45990a6e2" containerName="console" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.790201 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddedd179-84f4-4532-9d1b-eed45990a6e2" containerName="console" Sep 29 22:40:56 crc kubenswrapper[4922]: E0929 22:40:56.790217 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerName="util" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.790224 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerName="util" Sep 29 22:40:56 crc kubenswrapper[4922]: E0929 22:40:56.790237 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerName="extract" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.790245 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerName="extract" Sep 29 22:40:56 crc kubenswrapper[4922]: E0929 22:40:56.790260 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerName="pull" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.790268 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerName="pull" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.790416 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92" containerName="extract" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.790431 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddedd179-84f4-4532-9d1b-eed45990a6e2" containerName="console" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.790879 
4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.794422 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.794448 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.794844 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-wx7xr" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.794975 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.795341 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.803703 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-c87f67444-888l5"] Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.805464 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqzgk\" (UniqueName: \"kubernetes.io/projected/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-kube-api-access-gqzgk\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.805528 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-apiservice-cert\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.805554 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-webhook-cert\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.906235 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-apiservice-cert\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.906296 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-webhook-cert\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 
22:40:56.906333 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqzgk\" (UniqueName: \"kubernetes.io/projected/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-kube-api-access-gqzgk\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.912616 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-webhook-cert\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.916124 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-apiservice-cert\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:56 crc kubenswrapper[4922]: I0929 22:40:56.922527 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqzgk\" (UniqueName: \"kubernetes.io/projected/0b4b3f98-7191-422c-a8de-afbad5b8cd5d-kube-api-access-gqzgk\") pod \"metallb-operator-controller-manager-c87f67444-888l5\" (UID: \"0b4b3f98-7191-422c-a8de-afbad5b8cd5d\") " pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.143899 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.264059 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s"] Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.264704 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.270122 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-bzsdk" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.270447 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.273984 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.284141 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s"] Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.312154 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c8gg\" (UniqueName: \"kubernetes.io/projected/18415984-47a8-4a45-ad81-aa058b10d744-kube-api-access-6c8gg\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.312211 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/18415984-47a8-4a45-ad81-aa058b10d744-webhook-cert\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.312230 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/18415984-47a8-4a45-ad81-aa058b10d744-apiservice-cert\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.412975 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c8gg\" (UniqueName: \"kubernetes.io/projected/18415984-47a8-4a45-ad81-aa058b10d744-kube-api-access-6c8gg\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.413352 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/18415984-47a8-4a45-ad81-aa058b10d744-webhook-cert\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.413383 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/18415984-47a8-4a45-ad81-aa058b10d744-apiservice-cert\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 
22:40:57.417408 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/18415984-47a8-4a45-ad81-aa058b10d744-webhook-cert\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.423006 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/18415984-47a8-4a45-ad81-aa058b10d744-apiservice-cert\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.431354 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c8gg\" (UniqueName: \"kubernetes.io/projected/18415984-47a8-4a45-ad81-aa058b10d744-kube-api-access-6c8gg\") pod \"metallb-operator-webhook-server-75dd84bf76-bcl8s\" (UID: \"18415984-47a8-4a45-ad81-aa058b10d744\") " pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.578073 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.693904 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-c87f67444-888l5"] Sep 29 22:40:57 crc kubenswrapper[4922]: W0929 22:40:57.699841 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b4b3f98_7191_422c_a8de_afbad5b8cd5d.slice/crio-f209bb09ab265cf43f3bff691ec3b0e8991e2b56d27089c9aa9bdaa5eeb87401 WatchSource:0}: Error finding container f209bb09ab265cf43f3bff691ec3b0e8991e2b56d27089c9aa9bdaa5eeb87401: Status 404 returned error can't find the container with id f209bb09ab265cf43f3bff691ec3b0e8991e2b56d27089c9aa9bdaa5eeb87401 Sep 29 22:40:57 crc kubenswrapper[4922]: I0929 22:40:57.999377 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" event={"ID":"0b4b3f98-7191-422c-a8de-afbad5b8cd5d","Type":"ContainerStarted","Data":"f209bb09ab265cf43f3bff691ec3b0e8991e2b56d27089c9aa9bdaa5eeb87401"} Sep 29 22:40:58 crc kubenswrapper[4922]: I0929 22:40:58.034177 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s"] Sep 29 22:40:58 crc kubenswrapper[4922]: W0929 22:40:58.052040 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18415984_47a8_4a45_ad81_aa058b10d744.slice/crio-d388bd163d3c756f0eb07f742d76ff70f11bb6df9312f7fe290f3960cee1383d WatchSource:0}: Error finding container d388bd163d3c756f0eb07f742d76ff70f11bb6df9312f7fe290f3960cee1383d: Status 404 returned error can't find the container with id d388bd163d3c756f0eb07f742d76ff70f11bb6df9312f7fe290f3960cee1383d Sep 29 22:40:59 crc kubenswrapper[4922]: I0929 22:40:59.013599 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" 
event={"ID":"18415984-47a8-4a45-ad81-aa058b10d744","Type":"ContainerStarted","Data":"d388bd163d3c756f0eb07f742d76ff70f11bb6df9312f7fe290f3960cee1383d"} Sep 29 22:41:02 crc kubenswrapper[4922]: I0929 22:41:02.029700 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" event={"ID":"0b4b3f98-7191-422c-a8de-afbad5b8cd5d","Type":"ContainerStarted","Data":"395576a6bdb01ccba65880d4e21c0c97ad3e36535ceea060fcb190fea922c81a"} Sep 29 22:41:02 crc kubenswrapper[4922]: I0929 22:41:02.030919 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:41:03 crc kubenswrapper[4922]: I0929 22:41:03.037117 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" event={"ID":"18415984-47a8-4a45-ad81-aa058b10d744","Type":"ContainerStarted","Data":"83981d0d91d312c5fc33ddcbe559ac52b8cb9c12bd171c3639566d9d88fa35ea"} Sep 29 22:41:03 crc kubenswrapper[4922]: I0929 22:41:03.061810 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" podStartSLOduration=3.765598842 podStartE2EDuration="7.061791096s" podCreationTimestamp="2025-09-29 22:40:56 +0000 UTC" firstStartedPulling="2025-09-29 22:40:57.702053233 +0000 UTC m=+862.012342056" lastFinishedPulling="2025-09-29 22:41:00.998245497 +0000 UTC m=+865.308534310" observedRunningTime="2025-09-29 22:41:02.049622481 +0000 UTC m=+866.359911294" watchObservedRunningTime="2025-09-29 22:41:03.061791096 +0000 UTC m=+867.372079919" Sep 29 22:41:03 crc kubenswrapper[4922]: I0929 22:41:03.061996 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" podStartSLOduration=1.3782665619999999 podStartE2EDuration="6.061990991s" podCreationTimestamp="2025-09-29 22:40:57 +0000 UTC" firstStartedPulling="2025-09-29 22:40:58.056378979 +0000 UTC m=+862.366667832" lastFinishedPulling="2025-09-29 22:41:02.740103448 +0000 UTC m=+867.050392261" observedRunningTime="2025-09-29 22:41:03.061677473 +0000 UTC m=+867.371966296" watchObservedRunningTime="2025-09-29 22:41:03.061990991 +0000 UTC m=+867.372279814" Sep 29 22:41:04 crc kubenswrapper[4922]: I0929 22:41:04.042877 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:41:17 crc kubenswrapper[4922]: I0929 22:41:17.585185 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-75dd84bf76-bcl8s" Sep 29 22:41:37 crc kubenswrapper[4922]: I0929 22:41:37.147188 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-c87f67444-888l5" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.095682 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc"] Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.096514 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.099840 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-x5dmq" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.099848 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.104421 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-7zvc7"] Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.107779 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.110691 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.110785 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.128155 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc"] Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179251 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-conf\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179361 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-startup\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179443 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-sockets\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179498 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-reloader\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179520 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89xd4\" (UniqueName: \"kubernetes.io/projected/6975026a-8111-4858-80b0-bf06609ac878-kube-api-access-89xd4\") pod \"frr-k8s-webhook-server-5478bdb765-nnsjc\" (UID: \"6975026a-8111-4858-80b0-bf06609ac878\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179550 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9ptv\" (UniqueName: \"kubernetes.io/projected/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-kube-api-access-m9ptv\") pod \"frr-k8s-7zvc7\" (UID: 
\"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179602 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-metrics-certs\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179654 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6975026a-8111-4858-80b0-bf06609ac878-cert\") pod \"frr-k8s-webhook-server-5478bdb765-nnsjc\" (UID: \"6975026a-8111-4858-80b0-bf06609ac878\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.179681 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-metrics\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.183779 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-qj9cs"] Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.184886 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.188259 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.188322 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.188514 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-rl6hf" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.190347 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.203594 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-vvmvc"] Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.204839 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.209955 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-vvmvc"] Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.216745 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280176 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-metrics\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280227 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-metallb-excludel2\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280254 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280276 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-metrics-certs\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280306 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-conf\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280337 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-startup\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280492 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-sockets\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280549 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-reloader\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280568 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89xd4\" (UniqueName: 
\"kubernetes.io/projected/6975026a-8111-4858-80b0-bf06609ac878-kube-api-access-89xd4\") pod \"frr-k8s-webhook-server-5478bdb765-nnsjc\" (UID: \"6975026a-8111-4858-80b0-bf06609ac878\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280599 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-cert\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280625 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9ptv\" (UniqueName: \"kubernetes.io/projected/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-kube-api-access-m9ptv\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280645 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-metrics-certs\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280667 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm8mk\" (UniqueName: \"kubernetes.io/projected/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-kube-api-access-nm8mk\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280855 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-metrics-certs\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280853 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-metrics\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280929 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-conf\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.280969 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6975026a-8111-4858-80b0-bf06609ac878-cert\") pod \"frr-k8s-webhook-server-5478bdb765-nnsjc\" (UID: \"6975026a-8111-4858-80b0-bf06609ac878\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.281032 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxkhh\" (UniqueName: 
\"kubernetes.io/projected/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-kube-api-access-sxkhh\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.281101 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-reloader\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.281185 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-sockets\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.281673 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-frr-startup\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.285753 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-metrics-certs\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.286012 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6975026a-8111-4858-80b0-bf06609ac878-cert\") pod \"frr-k8s-webhook-server-5478bdb765-nnsjc\" (UID: \"6975026a-8111-4858-80b0-bf06609ac878\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.298544 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9ptv\" (UniqueName: \"kubernetes.io/projected/7fa14b20-9e00-4c61-9a3e-b064d2244eb8-kube-api-access-m9ptv\") pod \"frr-k8s-7zvc7\" (UID: \"7fa14b20-9e00-4c61-9a3e-b064d2244eb8\") " pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.300641 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89xd4\" (UniqueName: \"kubernetes.io/projected/6975026a-8111-4858-80b0-bf06609ac878-kube-api-access-89xd4\") pod \"frr-k8s-webhook-server-5478bdb765-nnsjc\" (UID: \"6975026a-8111-4858-80b0-bf06609ac878\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.381537 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-cert\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.381578 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm8mk\" (UniqueName: \"kubernetes.io/projected/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-kube-api-access-nm8mk\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 
22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.381603 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-metrics-certs\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.381644 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxkhh\" (UniqueName: \"kubernetes.io/projected/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-kube-api-access-sxkhh\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.381664 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-metallb-excludel2\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.381679 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.381695 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-metrics-certs\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: E0929 22:41:38.382150 4922 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Sep 29 22:41:38 crc kubenswrapper[4922]: E0929 22:41:38.382191 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-metrics-certs podName:5d45b0ab-59c1-49eb-8b38-8343f7a246a1 nodeName:}" failed. No retries permitted until 2025-09-29 22:41:38.882175742 +0000 UTC m=+903.192464555 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-metrics-certs") pod "controller-5d688f5ffc-vvmvc" (UID: "5d45b0ab-59c1-49eb-8b38-8343f7a246a1") : secret "controller-certs-secret" not found Sep 29 22:41:38 crc kubenswrapper[4922]: E0929 22:41:38.382339 4922 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 22:41:38 crc kubenswrapper[4922]: E0929 22:41:38.382361 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist podName:3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58 nodeName:}" failed. No retries permitted until 2025-09-29 22:41:38.882354576 +0000 UTC m=+903.192643379 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist") pod "speaker-qj9cs" (UID: "3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58") : secret "metallb-memberlist" not found Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.382749 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-metallb-excludel2\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.383537 4922 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.387720 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-metrics-certs\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.397707 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-cert\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.398100 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm8mk\" (UniqueName: \"kubernetes.io/projected/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-kube-api-access-nm8mk\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.398883 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxkhh\" (UniqueName: \"kubernetes.io/projected/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-kube-api-access-sxkhh\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.413127 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.423965 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.641008 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc"] Sep 29 22:41:38 crc kubenswrapper[4922]: W0929 22:41:38.646649 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6975026a_8111_4858_80b0_bf06609ac878.slice/crio-c2c430594f800f4ccafb3fc1659ffe03a645ae1b4f4d6c3eb49bada23b4279e0 WatchSource:0}: Error finding container c2c430594f800f4ccafb3fc1659ffe03a645ae1b4f4d6c3eb49bada23b4279e0: Status 404 returned error can't find the container with id c2c430594f800f4ccafb3fc1659ffe03a645ae1b4f4d6c3eb49bada23b4279e0 Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.888006 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-metrics-certs\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.888081 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:38 crc kubenswrapper[4922]: E0929 22:41:38.888198 4922 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 22:41:38 crc kubenswrapper[4922]: E0929 22:41:38.888242 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist podName:3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58 nodeName:}" failed. No retries permitted until 2025-09-29 22:41:39.888227968 +0000 UTC m=+904.198516781 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist") pod "speaker-qj9cs" (UID: "3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58") : secret "metallb-memberlist" not found Sep 29 22:41:38 crc kubenswrapper[4922]: I0929 22:41:38.892519 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5d45b0ab-59c1-49eb-8b38-8343f7a246a1-metrics-certs\") pod \"controller-5d688f5ffc-vvmvc\" (UID: \"5d45b0ab-59c1-49eb-8b38-8343f7a246a1\") " pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:39 crc kubenswrapper[4922]: I0929 22:41:39.124849 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:39 crc kubenswrapper[4922]: I0929 22:41:39.265856 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" event={"ID":"6975026a-8111-4858-80b0-bf06609ac878","Type":"ContainerStarted","Data":"c2c430594f800f4ccafb3fc1659ffe03a645ae1b4f4d6c3eb49bada23b4279e0"} Sep 29 22:41:39 crc kubenswrapper[4922]: I0929 22:41:39.267113 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerStarted","Data":"361bedcf0f2497606dbe55aff7a8efe72e86f159d15f3df30465c3b55bad16ae"} Sep 29 22:41:39 crc kubenswrapper[4922]: I0929 22:41:39.361360 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-vvmvc"] Sep 29 22:41:39 crc kubenswrapper[4922]: W0929 22:41:39.368419 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d45b0ab_59c1_49eb_8b38_8343f7a246a1.slice/crio-b72094371bd29255deb04706a4dd0fd3af6edbbf82452f9d78db12ea9700ce62 WatchSource:0}: Error finding container b72094371bd29255deb04706a4dd0fd3af6edbbf82452f9d78db12ea9700ce62: Status 404 returned error can't find the container with id b72094371bd29255deb04706a4dd0fd3af6edbbf82452f9d78db12ea9700ce62 Sep 29 22:41:39 crc kubenswrapper[4922]: I0929 22:41:39.906201 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:39 crc kubenswrapper[4922]: I0929 22:41:39.913867 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58-memberlist\") pod \"speaker-qj9cs\" (UID: \"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58\") " pod="metallb-system/speaker-qj9cs" Sep 29 22:41:40 crc kubenswrapper[4922]: I0929 22:41:40.009823 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-qj9cs" Sep 29 22:41:40 crc kubenswrapper[4922]: W0929 22:41:40.035359 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c4f6284_1ca8_4204_87b1_aaa1c6a2fb58.slice/crio-3bc0791f72905670b64d686c4ef90fe14f6d015087f863c018f6d06b318f545f WatchSource:0}: Error finding container 3bc0791f72905670b64d686c4ef90fe14f6d015087f863c018f6d06b318f545f: Status 404 returned error can't find the container with id 3bc0791f72905670b64d686c4ef90fe14f6d015087f863c018f6d06b318f545f Sep 29 22:41:40 crc kubenswrapper[4922]: I0929 22:41:40.274742 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-qj9cs" event={"ID":"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58","Type":"ContainerStarted","Data":"3bc0791f72905670b64d686c4ef90fe14f6d015087f863c018f6d06b318f545f"} Sep 29 22:41:40 crc kubenswrapper[4922]: I0929 22:41:40.277442 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-vvmvc" event={"ID":"5d45b0ab-59c1-49eb-8b38-8343f7a246a1","Type":"ContainerStarted","Data":"893ef3f0c0aede75a5aef81468993e4ae0e31d02da2e08c7732207b7ef14b54b"} Sep 29 22:41:40 crc kubenswrapper[4922]: I0929 22:41:40.277522 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-vvmvc" event={"ID":"5d45b0ab-59c1-49eb-8b38-8343f7a246a1","Type":"ContainerStarted","Data":"7cc606e1e8720b04c39e63e78372f0f7080cc0d32db7f5fec7d7e554fca44966"} Sep 29 22:41:40 crc kubenswrapper[4922]: I0929 22:41:40.277541 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-vvmvc" event={"ID":"5d45b0ab-59c1-49eb-8b38-8343f7a246a1","Type":"ContainerStarted","Data":"b72094371bd29255deb04706a4dd0fd3af6edbbf82452f9d78db12ea9700ce62"} Sep 29 22:41:40 crc kubenswrapper[4922]: I0929 22:41:40.277611 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:41 crc kubenswrapper[4922]: I0929 22:41:41.284619 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-qj9cs" event={"ID":"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58","Type":"ContainerStarted","Data":"2fa98bda65ad4506737878505b7957c55e03a0588d6d43a39e492b23f656ac40"} Sep 29 22:41:41 crc kubenswrapper[4922]: I0929 22:41:41.285010 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-qj9cs" event={"ID":"3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58","Type":"ContainerStarted","Data":"8cb150379a6c0a1f186e26b75d840ce8396b1ce6dba073f765c9a13b6b261c2a"} Sep 29 22:41:41 crc kubenswrapper[4922]: I0929 22:41:41.286687 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-qj9cs" Sep 29 22:41:41 crc kubenswrapper[4922]: I0929 22:41:41.306055 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-qj9cs" podStartSLOduration=3.3060387 podStartE2EDuration="3.3060387s" podCreationTimestamp="2025-09-29 22:41:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:41:41.305442406 +0000 UTC m=+905.615731219" watchObservedRunningTime="2025-09-29 22:41:41.3060387 +0000 UTC m=+905.616327513" Sep 29 22:41:41 crc kubenswrapper[4922]: I0929 22:41:41.308514 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/controller-5d688f5ffc-vvmvc" podStartSLOduration=3.30850695 podStartE2EDuration="3.30850695s" podCreationTimestamp="2025-09-29 22:41:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:41:40.299955404 +0000 UTC m=+904.610244227" watchObservedRunningTime="2025-09-29 22:41:41.30850695 +0000 UTC m=+905.618795763" Sep 29 22:41:46 crc kubenswrapper[4922]: I0929 22:41:46.335643 4922 generic.go:334] "Generic (PLEG): container finished" podID="7fa14b20-9e00-4c61-9a3e-b064d2244eb8" containerID="364f92016dd12493ad093fb7fe17a3a5df171ccca8cfea44a96b17c8a59bde53" exitCode=0 Sep 29 22:41:46 crc kubenswrapper[4922]: I0929 22:41:46.335769 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerDied","Data":"364f92016dd12493ad093fb7fe17a3a5df171ccca8cfea44a96b17c8a59bde53"} Sep 29 22:41:46 crc kubenswrapper[4922]: I0929 22:41:46.340120 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" event={"ID":"6975026a-8111-4858-80b0-bf06609ac878","Type":"ContainerStarted","Data":"d892efe6865f2109b6ee3865306ff6b70895b55b1cac6e69e4e872107c552414"} Sep 29 22:41:46 crc kubenswrapper[4922]: I0929 22:41:46.340360 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:46 crc kubenswrapper[4922]: I0929 22:41:46.384058 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" podStartSLOduration=1.356741362 podStartE2EDuration="8.384034274s" podCreationTimestamp="2025-09-29 22:41:38 +0000 UTC" firstStartedPulling="2025-09-29 22:41:38.64992731 +0000 UTC m=+902.960216123" lastFinishedPulling="2025-09-29 22:41:45.677220182 +0000 UTC m=+909.987509035" observedRunningTime="2025-09-29 22:41:46.38059265 +0000 UTC m=+910.690881503" watchObservedRunningTime="2025-09-29 22:41:46.384034274 +0000 UTC m=+910.694323127" Sep 29 22:41:47 crc kubenswrapper[4922]: I0929 22:41:47.349166 4922 generic.go:334] "Generic (PLEG): container finished" podID="7fa14b20-9e00-4c61-9a3e-b064d2244eb8" containerID="ddb63e021b995a6ca92b6b8346539a3ab2f5256b3bbfebc65d8be2d63effb3fb" exitCode=0 Sep 29 22:41:47 crc kubenswrapper[4922]: I0929 22:41:47.349209 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerDied","Data":"ddb63e021b995a6ca92b6b8346539a3ab2f5256b3bbfebc65d8be2d63effb3fb"} Sep 29 22:41:48 crc kubenswrapper[4922]: I0929 22:41:48.360718 4922 generic.go:334] "Generic (PLEG): container finished" podID="7fa14b20-9e00-4c61-9a3e-b064d2244eb8" containerID="ca954c48161da7c56ddde32ab23511db3ffbb671df7baf3d7cd08ede62c631c6" exitCode=0 Sep 29 22:41:48 crc kubenswrapper[4922]: I0929 22:41:48.360803 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerDied","Data":"ca954c48161da7c56ddde32ab23511db3ffbb671df7baf3d7cd08ede62c631c6"} Sep 29 22:41:49 crc kubenswrapper[4922]: I0929 22:41:49.131150 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-vvmvc" Sep 29 22:41:49 crc kubenswrapper[4922]: I0929 22:41:49.373164 4922 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerStarted","Data":"2e69785006ba3e18cea25280c8162a5e7ff65a85de1f4ad9a405a7efc01660bf"} Sep 29 22:41:49 crc kubenswrapper[4922]: I0929 22:41:49.373211 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerStarted","Data":"1794ffef81b46434a5834fc0cbfdcad4df4d24b0d0358130b9da2e239d2f1af3"} Sep 29 22:41:49 crc kubenswrapper[4922]: I0929 22:41:49.373225 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerStarted","Data":"3011e4d3c38c25681df13d6cb0b04c7a5ee98558120d8d2cf02efb3b6e7626e1"} Sep 29 22:41:49 crc kubenswrapper[4922]: I0929 22:41:49.373236 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerStarted","Data":"54f65b57a113ff3175febb4aeb425d340eaa68e881b4bf156a410b655cabe5e9"} Sep 29 22:41:49 crc kubenswrapper[4922]: I0929 22:41:49.373247 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerStarted","Data":"562e88a5993ae6432dcff27beccfa5086fb98d2377f404711e960f08ee67305a"} Sep 29 22:41:50 crc kubenswrapper[4922]: I0929 22:41:50.013746 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-qj9cs" Sep 29 22:41:50 crc kubenswrapper[4922]: I0929 22:41:50.389875 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7zvc7" event={"ID":"7fa14b20-9e00-4c61-9a3e-b064d2244eb8","Type":"ContainerStarted","Data":"f2eba47c0790cef9f303e1446a4fb021351f96cf70990659822aa4d692d5caa8"} Sep 29 22:41:50 crc kubenswrapper[4922]: I0929 22:41:50.391567 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:50 crc kubenswrapper[4922]: I0929 22:41:50.433123 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-7zvc7" podStartSLOduration=5.307214354 podStartE2EDuration="12.433101018s" podCreationTimestamp="2025-09-29 22:41:38 +0000 UTC" firstStartedPulling="2025-09-29 22:41:38.557231583 +0000 UTC m=+902.867520396" lastFinishedPulling="2025-09-29 22:41:45.683118207 +0000 UTC m=+909.993407060" observedRunningTime="2025-09-29 22:41:50.430352701 +0000 UTC m=+914.740641554" watchObservedRunningTime="2025-09-29 22:41:50.433101018 +0000 UTC m=+914.743389851" Sep 29 22:41:51 crc kubenswrapper[4922]: I0929 22:41:51.940899 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q"] Sep 29 22:41:51 crc kubenswrapper[4922]: I0929 22:41:51.942172 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:51 crc kubenswrapper[4922]: I0929 22:41:51.945426 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 22:41:51 crc kubenswrapper[4922]: I0929 22:41:51.971474 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q"] Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.084087 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.084164 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swbr7\" (UniqueName: \"kubernetes.io/projected/ca2c2f99-b018-4313-90e5-73ae578f6717-kube-api-access-swbr7\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.084197 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.185531 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swbr7\" (UniqueName: \"kubernetes.io/projected/ca2c2f99-b018-4313-90e5-73ae578f6717-kube-api-access-swbr7\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.185678 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.185856 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.186211 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.186490 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.215440 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swbr7\" (UniqueName: \"kubernetes.io/projected/ca2c2f99-b018-4313-90e5-73ae578f6717-kube-api-access-swbr7\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.278873 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:41:52 crc kubenswrapper[4922]: I0929 22:41:52.510854 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q"] Sep 29 22:41:52 crc kubenswrapper[4922]: W0929 22:41:52.521872 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca2c2f99_b018_4313_90e5_73ae578f6717.slice/crio-249725733c1cf912fb008832ba46df22df8ef0774a1229ad6cb4ed2d549e52e0 WatchSource:0}: Error finding container 249725733c1cf912fb008832ba46df22df8ef0774a1229ad6cb4ed2d549e52e0: Status 404 returned error can't find the container with id 249725733c1cf912fb008832ba46df22df8ef0774a1229ad6cb4ed2d549e52e0 Sep 29 22:41:53 crc kubenswrapper[4922]: I0929 22:41:53.407095 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" event={"ID":"ca2c2f99-b018-4313-90e5-73ae578f6717","Type":"ContainerStarted","Data":"78f851b8843092a4e17c881180a6d036ca8550b7905f3185a71feeb3fd6d3717"} Sep 29 22:41:53 crc kubenswrapper[4922]: I0929 22:41:53.407532 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" event={"ID":"ca2c2f99-b018-4313-90e5-73ae578f6717","Type":"ContainerStarted","Data":"249725733c1cf912fb008832ba46df22df8ef0774a1229ad6cb4ed2d549e52e0"} Sep 29 22:41:53 crc kubenswrapper[4922]: I0929 22:41:53.424748 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:53 crc kubenswrapper[4922]: I0929 22:41:53.489874 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:54 crc kubenswrapper[4922]: I0929 22:41:54.414348 4922 generic.go:334] "Generic (PLEG): container finished" podID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerID="78f851b8843092a4e17c881180a6d036ca8550b7905f3185a71feeb3fd6d3717" exitCode=0 Sep 29 22:41:54 crc kubenswrapper[4922]: 
I0929 22:41:54.414470 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" event={"ID":"ca2c2f99-b018-4313-90e5-73ae578f6717","Type":"ContainerDied","Data":"78f851b8843092a4e17c881180a6d036ca8550b7905f3185a71feeb3fd6d3717"} Sep 29 22:41:58 crc kubenswrapper[4922]: I0929 22:41:58.419071 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-nnsjc" Sep 29 22:41:58 crc kubenswrapper[4922]: I0929 22:41:58.436739 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-7zvc7" Sep 29 22:41:58 crc kubenswrapper[4922]: I0929 22:41:58.445016 4922 generic.go:334] "Generic (PLEG): container finished" podID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerID="92aa0de71716c520236ba7e1e3fd6dcfe6fccc5d3941a39d53496d28a73b9f0e" exitCode=0 Sep 29 22:41:58 crc kubenswrapper[4922]: I0929 22:41:58.445110 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" event={"ID":"ca2c2f99-b018-4313-90e5-73ae578f6717","Type":"ContainerDied","Data":"92aa0de71716c520236ba7e1e3fd6dcfe6fccc5d3941a39d53496d28a73b9f0e"} Sep 29 22:41:59 crc kubenswrapper[4922]: I0929 22:41:59.453967 4922 generic.go:334] "Generic (PLEG): container finished" podID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerID="b9e43aeebc36c9eef749d97234291a911a1bcc238059d50f3c2e82738188c665" exitCode=0 Sep 29 22:41:59 crc kubenswrapper[4922]: I0929 22:41:59.454052 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" event={"ID":"ca2c2f99-b018-4313-90e5-73ae578f6717","Type":"ContainerDied","Data":"b9e43aeebc36c9eef749d97234291a911a1bcc238059d50f3c2e82738188c665"} Sep 29 22:42:00 crc kubenswrapper[4922]: I0929 22:42:00.826740 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:42:00 crc kubenswrapper[4922]: I0929 22:42:00.934723 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swbr7\" (UniqueName: \"kubernetes.io/projected/ca2c2f99-b018-4313-90e5-73ae578f6717-kube-api-access-swbr7\") pod \"ca2c2f99-b018-4313-90e5-73ae578f6717\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " Sep 29 22:42:00 crc kubenswrapper[4922]: I0929 22:42:00.934780 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-util\") pod \"ca2c2f99-b018-4313-90e5-73ae578f6717\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " Sep 29 22:42:00 crc kubenswrapper[4922]: I0929 22:42:00.934949 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-bundle\") pod \"ca2c2f99-b018-4313-90e5-73ae578f6717\" (UID: \"ca2c2f99-b018-4313-90e5-73ae578f6717\") " Sep 29 22:42:00 crc kubenswrapper[4922]: I0929 22:42:00.937766 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-bundle" (OuterVolumeSpecName: "bundle") pod "ca2c2f99-b018-4313-90e5-73ae578f6717" (UID: "ca2c2f99-b018-4313-90e5-73ae578f6717"). 
InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:42:00 crc kubenswrapper[4922]: I0929 22:42:00.944945 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca2c2f99-b018-4313-90e5-73ae578f6717-kube-api-access-swbr7" (OuterVolumeSpecName: "kube-api-access-swbr7") pod "ca2c2f99-b018-4313-90e5-73ae578f6717" (UID: "ca2c2f99-b018-4313-90e5-73ae578f6717"). InnerVolumeSpecName "kube-api-access-swbr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:42:00 crc kubenswrapper[4922]: I0929 22:42:00.945496 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-util" (OuterVolumeSpecName: "util") pod "ca2c2f99-b018-4313-90e5-73ae578f6717" (UID: "ca2c2f99-b018-4313-90e5-73ae578f6717"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:42:01 crc kubenswrapper[4922]: I0929 22:42:01.036108 4922 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:42:01 crc kubenswrapper[4922]: I0929 22:42:01.036139 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swbr7\" (UniqueName: \"kubernetes.io/projected/ca2c2f99-b018-4313-90e5-73ae578f6717-kube-api-access-swbr7\") on node \"crc\" DevicePath \"\"" Sep 29 22:42:01 crc kubenswrapper[4922]: I0929 22:42:01.036154 4922 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ca2c2f99-b018-4313-90e5-73ae578f6717-util\") on node \"crc\" DevicePath \"\"" Sep 29 22:42:01 crc kubenswrapper[4922]: I0929 22:42:01.472618 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" event={"ID":"ca2c2f99-b018-4313-90e5-73ae578f6717","Type":"ContainerDied","Data":"249725733c1cf912fb008832ba46df22df8ef0774a1229ad6cb4ed2d549e52e0"} Sep 29 22:42:01 crc kubenswrapper[4922]: I0929 22:42:01.472850 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="249725733c1cf912fb008832ba46df22df8ef0774a1229ad6cb4ed2d549e52e0" Sep 29 22:42:01 crc kubenswrapper[4922]: I0929 22:42:01.472678 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.981808 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj"] Sep 29 22:42:04 crc kubenswrapper[4922]: E0929 22:42:04.982478 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerName="extract" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.982498 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerName="extract" Sep 29 22:42:04 crc kubenswrapper[4922]: E0929 22:42:04.982521 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerName="pull" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.982531 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerName="pull" Sep 29 22:42:04 crc kubenswrapper[4922]: E0929 22:42:04.982557 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerName="util" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.982565 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerName="util" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.982704 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca2c2f99-b018-4313-90e5-73ae578f6717" containerName="extract" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.983168 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.985238 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.985676 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.985856 4922 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-zl55k" Sep 29 22:42:04 crc kubenswrapper[4922]: I0929 22:42:04.986272 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46tdv\" (UniqueName: \"kubernetes.io/projected/285723a8-a89c-4033-a440-f1937fb8d4a5-kube-api-access-46tdv\") pod \"cert-manager-operator-controller-manager-57cd46d6d-zjhxj\" (UID: \"285723a8-a89c-4033-a440-f1937fb8d4a5\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" Sep 29 22:42:05 crc kubenswrapper[4922]: I0929 22:42:05.000512 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj"] Sep 29 22:42:05 crc kubenswrapper[4922]: I0929 22:42:05.087779 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46tdv\" (UniqueName: \"kubernetes.io/projected/285723a8-a89c-4033-a440-f1937fb8d4a5-kube-api-access-46tdv\") pod \"cert-manager-operator-controller-manager-57cd46d6d-zjhxj\" (UID: \"285723a8-a89c-4033-a440-f1937fb8d4a5\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" Sep 29 22:42:05 crc kubenswrapper[4922]: I0929 22:42:05.109514 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46tdv\" (UniqueName: \"kubernetes.io/projected/285723a8-a89c-4033-a440-f1937fb8d4a5-kube-api-access-46tdv\") pod \"cert-manager-operator-controller-manager-57cd46d6d-zjhxj\" (UID: \"285723a8-a89c-4033-a440-f1937fb8d4a5\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" Sep 29 22:42:05 crc kubenswrapper[4922]: I0929 22:42:05.304843 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" Sep 29 22:42:05 crc kubenswrapper[4922]: I0929 22:42:05.738711 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj"] Sep 29 22:42:05 crc kubenswrapper[4922]: W0929 22:42:05.745580 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod285723a8_a89c_4033_a440_f1937fb8d4a5.slice/crio-bf1506fd8460648efda7823323e80a40e4496c003282ed9bccab3898be8b8e9e WatchSource:0}: Error finding container bf1506fd8460648efda7823323e80a40e4496c003282ed9bccab3898be8b8e9e: Status 404 returned error can't find the container with id bf1506fd8460648efda7823323e80a40e4496c003282ed9bccab3898be8b8e9e Sep 29 22:42:06 crc kubenswrapper[4922]: I0929 22:42:06.509486 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" event={"ID":"285723a8-a89c-4033-a440-f1937fb8d4a5","Type":"ContainerStarted","Data":"bf1506fd8460648efda7823323e80a40e4496c003282ed9bccab3898be8b8e9e"} Sep 29 22:42:14 crc kubenswrapper[4922]: I0929 22:42:14.569311 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" event={"ID":"285723a8-a89c-4033-a440-f1937fb8d4a5","Type":"ContainerStarted","Data":"30d87f7988ba0977563607ca70106c63d84a7849fa5b2a464684ec9915563646"} Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.020080 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-57cd46d6d-zjhxj" podStartSLOduration=6.092166582 podStartE2EDuration="14.020052224s" podCreationTimestamp="2025-09-29 22:42:04 +0000 UTC" firstStartedPulling="2025-09-29 22:42:05.747825946 +0000 UTC m=+930.058114769" lastFinishedPulling="2025-09-29 22:42:13.675711548 +0000 UTC m=+937.986000411" observedRunningTime="2025-09-29 22:42:14.600199793 +0000 UTC m=+938.910488596" watchObservedRunningTime="2025-09-29 22:42:18.020052224 +0000 UTC m=+942.330341077" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.024897 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-2p5h8"] Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.026004 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.029318 4922 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-bk8d6" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.031356 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.032139 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.037038 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-2p5h8"] Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.095619 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58mfk\" (UniqueName: \"kubernetes.io/projected/18349d86-b50f-4900-a5a0-42b7b55f79d6-kube-api-access-58mfk\") pod \"cert-manager-webhook-d969966f-2p5h8\" (UID: \"18349d86-b50f-4900-a5a0-42b7b55f79d6\") " pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.095757 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18349d86-b50f-4900-a5a0-42b7b55f79d6-bound-sa-token\") pod \"cert-manager-webhook-d969966f-2p5h8\" (UID: \"18349d86-b50f-4900-a5a0-42b7b55f79d6\") " pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.196831 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18349d86-b50f-4900-a5a0-42b7b55f79d6-bound-sa-token\") pod \"cert-manager-webhook-d969966f-2p5h8\" (UID: \"18349d86-b50f-4900-a5a0-42b7b55f79d6\") " pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.196984 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58mfk\" (UniqueName: \"kubernetes.io/projected/18349d86-b50f-4900-a5a0-42b7b55f79d6-kube-api-access-58mfk\") pod \"cert-manager-webhook-d969966f-2p5h8\" (UID: \"18349d86-b50f-4900-a5a0-42b7b55f79d6\") " pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.221415 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58mfk\" (UniqueName: \"kubernetes.io/projected/18349d86-b50f-4900-a5a0-42b7b55f79d6-kube-api-access-58mfk\") pod \"cert-manager-webhook-d969966f-2p5h8\" (UID: \"18349d86-b50f-4900-a5a0-42b7b55f79d6\") " pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.223186 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18349d86-b50f-4900-a5a0-42b7b55f79d6-bound-sa-token\") pod \"cert-manager-webhook-d969966f-2p5h8\" (UID: \"18349d86-b50f-4900-a5a0-42b7b55f79d6\") " pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.386679 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.399351 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p"] Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.400470 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.416597 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p"] Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.601487 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnjjc\" (UniqueName: \"kubernetes.io/projected/89e3dc38-6b7e-4449-98cf-e7355b77b7aa-kube-api-access-vnjjc\") pod \"cert-manager-cainjector-7d9f95dbf-mh57p\" (UID: \"89e3dc38-6b7e-4449-98cf-e7355b77b7aa\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.601776 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/89e3dc38-6b7e-4449-98cf-e7355b77b7aa-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-mh57p\" (UID: \"89e3dc38-6b7e-4449-98cf-e7355b77b7aa\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.651692 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-2p5h8"] Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.703495 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnjjc\" (UniqueName: \"kubernetes.io/projected/89e3dc38-6b7e-4449-98cf-e7355b77b7aa-kube-api-access-vnjjc\") pod \"cert-manager-cainjector-7d9f95dbf-mh57p\" (UID: \"89e3dc38-6b7e-4449-98cf-e7355b77b7aa\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.703596 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/89e3dc38-6b7e-4449-98cf-e7355b77b7aa-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-mh57p\" (UID: \"89e3dc38-6b7e-4449-98cf-e7355b77b7aa\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.720908 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnjjc\" (UniqueName: \"kubernetes.io/projected/89e3dc38-6b7e-4449-98cf-e7355b77b7aa-kube-api-access-vnjjc\") pod \"cert-manager-cainjector-7d9f95dbf-mh57p\" (UID: \"89e3dc38-6b7e-4449-98cf-e7355b77b7aa\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.725549 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/89e3dc38-6b7e-4449-98cf-e7355b77b7aa-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-mh57p\" (UID: \"89e3dc38-6b7e-4449-98cf-e7355b77b7aa\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:18 crc kubenswrapper[4922]: I0929 22:42:18.782926 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" Sep 29 22:42:19 crc kubenswrapper[4922]: I0929 22:42:19.230958 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p"] Sep 29 22:42:19 crc kubenswrapper[4922]: W0929 22:42:19.239625 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89e3dc38_6b7e_4449_98cf_e7355b77b7aa.slice/crio-2baa2c62350d182f7e29b50e44ca1883c2dc52cd25bce837817647f5f8202c3b WatchSource:0}: Error finding container 2baa2c62350d182f7e29b50e44ca1883c2dc52cd25bce837817647f5f8202c3b: Status 404 returned error can't find the container with id 2baa2c62350d182f7e29b50e44ca1883c2dc52cd25bce837817647f5f8202c3b Sep 29 22:42:19 crc kubenswrapper[4922]: I0929 22:42:19.606136 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" event={"ID":"18349d86-b50f-4900-a5a0-42b7b55f79d6","Type":"ContainerStarted","Data":"7348e1a5add33ba27c8c31f751b62d6db0a0cf19f87c331dca6bbdaf049938e9"} Sep 29 22:42:19 crc kubenswrapper[4922]: I0929 22:42:19.606903 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" event={"ID":"89e3dc38-6b7e-4449-98cf-e7355b77b7aa","Type":"ContainerStarted","Data":"2baa2c62350d182f7e29b50e44ca1883c2dc52cd25bce837817647f5f8202c3b"} Sep 29 22:42:23 crc kubenswrapper[4922]: I0929 22:42:23.634725 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" event={"ID":"18349d86-b50f-4900-a5a0-42b7b55f79d6","Type":"ContainerStarted","Data":"631e5eb22fe3bd06b99f61975adf0c4b0895f1241571b034dfd1bd98714d3ca7"} Sep 29 22:42:23 crc kubenswrapper[4922]: I0929 22:42:23.635574 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:23 crc kubenswrapper[4922]: I0929 22:42:23.636658 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" event={"ID":"89e3dc38-6b7e-4449-98cf-e7355b77b7aa","Type":"ContainerStarted","Data":"697163f4bb7860009d854b12c495c84bd3112a4d502e9bdc5e98b9e88610cebd"} Sep 29 22:42:23 crc kubenswrapper[4922]: I0929 22:42:23.664706 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" podStartSLOduration=1.252504051 podStartE2EDuration="5.664678595s" podCreationTimestamp="2025-09-29 22:42:18 +0000 UTC" firstStartedPulling="2025-09-29 22:42:18.66687404 +0000 UTC m=+942.977162853" lastFinishedPulling="2025-09-29 22:42:23.079048574 +0000 UTC m=+947.389337397" observedRunningTime="2025-09-29 22:42:23.656859783 +0000 UTC m=+947.967148626" watchObservedRunningTime="2025-09-29 22:42:23.664678595 +0000 UTC m=+947.974967448" Sep 29 22:42:23 crc kubenswrapper[4922]: I0929 22:42:23.675584 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-mh57p" podStartSLOduration=1.813703472 podStartE2EDuration="5.675568593s" podCreationTimestamp="2025-09-29 22:42:18 +0000 UTC" firstStartedPulling="2025-09-29 22:42:19.242202039 +0000 UTC m=+943.552490862" lastFinishedPulling="2025-09-29 22:42:23.10406713 +0000 UTC m=+947.414355983" observedRunningTime="2025-09-29 22:42:23.670708433 +0000 UTC m=+947.980997266" watchObservedRunningTime="2025-09-29 22:42:23.675568593 +0000 UTC 
m=+947.985857416" Sep 29 22:42:28 crc kubenswrapper[4922]: I0929 22:42:28.391022 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-2p5h8" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.732619 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-bns8p"] Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.735287 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.739044 4922 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6h4xt" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.784571 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-bns8p"] Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.861312 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/344eedc4-9955-40cd-9366-e7249d7a6aa7-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-bns8p\" (UID: \"344eedc4-9955-40cd-9366-e7249d7a6aa7\") " pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.861440 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtcd8\" (UniqueName: \"kubernetes.io/projected/344eedc4-9955-40cd-9366-e7249d7a6aa7-kube-api-access-mtcd8\") pod \"cert-manager-7d4cc89fcb-bns8p\" (UID: \"344eedc4-9955-40cd-9366-e7249d7a6aa7\") " pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.962798 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/344eedc4-9955-40cd-9366-e7249d7a6aa7-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-bns8p\" (UID: \"344eedc4-9955-40cd-9366-e7249d7a6aa7\") " pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.962863 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtcd8\" (UniqueName: \"kubernetes.io/projected/344eedc4-9955-40cd-9366-e7249d7a6aa7-kube-api-access-mtcd8\") pod \"cert-manager-7d4cc89fcb-bns8p\" (UID: \"344eedc4-9955-40cd-9366-e7249d7a6aa7\") " pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.995927 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/344eedc4-9955-40cd-9366-e7249d7a6aa7-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-bns8p\" (UID: \"344eedc4-9955-40cd-9366-e7249d7a6aa7\") " pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:35 crc kubenswrapper[4922]: I0929 22:42:35.996459 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtcd8\" (UniqueName: \"kubernetes.io/projected/344eedc4-9955-40cd-9366-e7249d7a6aa7-kube-api-access-mtcd8\") pod \"cert-manager-7d4cc89fcb-bns8p\" (UID: \"344eedc4-9955-40cd-9366-e7249d7a6aa7\") " pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:36 crc kubenswrapper[4922]: I0929 22:42:36.094673 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" Sep 29 22:42:36 crc kubenswrapper[4922]: I0929 22:42:36.585364 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-bns8p"] Sep 29 22:42:36 crc kubenswrapper[4922]: W0929 22:42:36.591995 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod344eedc4_9955_40cd_9366_e7249d7a6aa7.slice/crio-a6a0ba47bd6d7192f09f261c8cd0c18537d5006e68ba5bb57899a375d3345952 WatchSource:0}: Error finding container a6a0ba47bd6d7192f09f261c8cd0c18537d5006e68ba5bb57899a375d3345952: Status 404 returned error can't find the container with id a6a0ba47bd6d7192f09f261c8cd0c18537d5006e68ba5bb57899a375d3345952 Sep 29 22:42:36 crc kubenswrapper[4922]: I0929 22:42:36.747993 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" event={"ID":"344eedc4-9955-40cd-9366-e7249d7a6aa7","Type":"ContainerStarted","Data":"a6a0ba47bd6d7192f09f261c8cd0c18537d5006e68ba5bb57899a375d3345952"} Sep 29 22:42:37 crc kubenswrapper[4922]: I0929 22:42:37.759905 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" event={"ID":"344eedc4-9955-40cd-9366-e7249d7a6aa7","Type":"ContainerStarted","Data":"d09c7f46ee8eab4758b01febedb814785152cd7becca22420cef45984dd675b3"} Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.595464 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-bns8p" podStartSLOduration=7.5954452329999995 podStartE2EDuration="7.595445233s" podCreationTimestamp="2025-09-29 22:42:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:42:37.796087376 +0000 UTC m=+962.106376229" watchObservedRunningTime="2025-09-29 22:42:42.595445233 +0000 UTC m=+966.905734056" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.599244 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-l9g49"] Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.600000 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-l9g49" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.602483 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-h7kb6" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.602696 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.606775 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.665439 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-l9g49"] Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.771946 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w58dk\" (UniqueName: \"kubernetes.io/projected/0cbeead1-c6fb-4f62-8573-c689333222b9-kube-api-access-w58dk\") pod \"openstack-operator-index-l9g49\" (UID: \"0cbeead1-c6fb-4f62-8573-c689333222b9\") " pod="openstack-operators/openstack-operator-index-l9g49" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.873470 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w58dk\" (UniqueName: \"kubernetes.io/projected/0cbeead1-c6fb-4f62-8573-c689333222b9-kube-api-access-w58dk\") pod \"openstack-operator-index-l9g49\" (UID: \"0cbeead1-c6fb-4f62-8573-c689333222b9\") " pod="openstack-operators/openstack-operator-index-l9g49" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.898277 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w58dk\" (UniqueName: \"kubernetes.io/projected/0cbeead1-c6fb-4f62-8573-c689333222b9-kube-api-access-w58dk\") pod \"openstack-operator-index-l9g49\" (UID: \"0cbeead1-c6fb-4f62-8573-c689333222b9\") " pod="openstack-operators/openstack-operator-index-l9g49" Sep 29 22:42:42 crc kubenswrapper[4922]: I0929 22:42:42.927598 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-l9g49" Sep 29 22:42:43 crc kubenswrapper[4922]: I0929 22:42:43.234968 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-l9g49"] Sep 29 22:42:43 crc kubenswrapper[4922]: W0929 22:42:43.246223 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cbeead1_c6fb_4f62_8573_c689333222b9.slice/crio-55cfc5f8873081b67ee69d699cba4819bf34ae325162ba4fc97f8f8ae8005b4e WatchSource:0}: Error finding container 55cfc5f8873081b67ee69d699cba4819bf34ae325162ba4fc97f8f8ae8005b4e: Status 404 returned error can't find the container with id 55cfc5f8873081b67ee69d699cba4819bf34ae325162ba4fc97f8f8ae8005b4e Sep 29 22:42:43 crc kubenswrapper[4922]: I0929 22:42:43.810207 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-l9g49" event={"ID":"0cbeead1-c6fb-4f62-8573-c689333222b9","Type":"ContainerStarted","Data":"55cfc5f8873081b67ee69d699cba4819bf34ae325162ba4fc97f8f8ae8005b4e"} Sep 29 22:42:45 crc kubenswrapper[4922]: I0929 22:42:45.961664 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-l9g49"] Sep 29 22:42:46 crc kubenswrapper[4922]: I0929 22:42:46.564385 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-ts4cc"] Sep 29 22:42:46 crc kubenswrapper[4922]: I0929 22:42:46.565075 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:46 crc kubenswrapper[4922]: I0929 22:42:46.580178 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ts4cc"] Sep 29 22:42:46 crc kubenswrapper[4922]: I0929 22:42:46.742327 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8225\" (UniqueName: \"kubernetes.io/projected/915c46b2-3abe-4a77-8d50-03fecbbf6575-kube-api-access-f8225\") pod \"openstack-operator-index-ts4cc\" (UID: \"915c46b2-3abe-4a77-8d50-03fecbbf6575\") " pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:46 crc kubenswrapper[4922]: I0929 22:42:46.843621 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8225\" (UniqueName: \"kubernetes.io/projected/915c46b2-3abe-4a77-8d50-03fecbbf6575-kube-api-access-f8225\") pod \"openstack-operator-index-ts4cc\" (UID: \"915c46b2-3abe-4a77-8d50-03fecbbf6575\") " pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:46 crc kubenswrapper[4922]: I0929 22:42:46.867155 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8225\" (UniqueName: \"kubernetes.io/projected/915c46b2-3abe-4a77-8d50-03fecbbf6575-kube-api-access-f8225\") pod \"openstack-operator-index-ts4cc\" (UID: \"915c46b2-3abe-4a77-8d50-03fecbbf6575\") " pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:46 crc kubenswrapper[4922]: I0929 22:42:46.889643 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:47 crc kubenswrapper[4922]: I0929 22:42:47.924956 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-ts4cc"] Sep 29 22:42:47 crc kubenswrapper[4922]: W0929 22:42:47.933019 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod915c46b2_3abe_4a77_8d50_03fecbbf6575.slice/crio-b403fc174b262e7dc0a4c950ebb420b2ed38c74ca717c5f3e175d763e4acff6a WatchSource:0}: Error finding container b403fc174b262e7dc0a4c950ebb420b2ed38c74ca717c5f3e175d763e4acff6a: Status 404 returned error can't find the container with id b403fc174b262e7dc0a4c950ebb420b2ed38c74ca717c5f3e175d763e4acff6a Sep 29 22:42:48 crc kubenswrapper[4922]: I0929 22:42:48.849262 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ts4cc" event={"ID":"915c46b2-3abe-4a77-8d50-03fecbbf6575","Type":"ContainerStarted","Data":"fd73fd323dfb5d40e9a0fcd6a486ee9a0bbe56112d8158b815c30872df37c386"} Sep 29 22:42:48 crc kubenswrapper[4922]: I0929 22:42:48.849344 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-ts4cc" event={"ID":"915c46b2-3abe-4a77-8d50-03fecbbf6575","Type":"ContainerStarted","Data":"b403fc174b262e7dc0a4c950ebb420b2ed38c74ca717c5f3e175d763e4acff6a"} Sep 29 22:42:48 crc kubenswrapper[4922]: I0929 22:42:48.851859 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-l9g49" event={"ID":"0cbeead1-c6fb-4f62-8573-c689333222b9","Type":"ContainerStarted","Data":"80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c"} Sep 29 22:42:48 crc kubenswrapper[4922]: I0929 22:42:48.851987 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-l9g49" podUID="0cbeead1-c6fb-4f62-8573-c689333222b9" containerName="registry-server" containerID="cri-o://80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c" gracePeriod=2 Sep 29 22:42:48 crc kubenswrapper[4922]: I0929 22:42:48.870778 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-ts4cc" podStartSLOduration=2.82663924 podStartE2EDuration="2.870757265s" podCreationTimestamp="2025-09-29 22:42:46 +0000 UTC" firstStartedPulling="2025-09-29 22:42:47.937095555 +0000 UTC m=+972.247384408" lastFinishedPulling="2025-09-29 22:42:47.98121362 +0000 UTC m=+972.291502433" observedRunningTime="2025-09-29 22:42:48.869803631 +0000 UTC m=+973.180092474" watchObservedRunningTime="2025-09-29 22:42:48.870757265 +0000 UTC m=+973.181046108" Sep 29 22:42:48 crc kubenswrapper[4922]: I0929 22:42:48.897051 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-l9g49" podStartSLOduration=2.428431819 podStartE2EDuration="6.897022871s" podCreationTimestamp="2025-09-29 22:42:42 +0000 UTC" firstStartedPulling="2025-09-29 22:42:43.248583155 +0000 UTC m=+967.558871968" lastFinishedPulling="2025-09-29 22:42:47.717174167 +0000 UTC m=+972.027463020" observedRunningTime="2025-09-29 22:42:48.890158572 +0000 UTC m=+973.200447455" watchObservedRunningTime="2025-09-29 22:42:48.897022871 +0000 UTC m=+973.207311724" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.325764 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-l9g49" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.391109 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w58dk\" (UniqueName: \"kubernetes.io/projected/0cbeead1-c6fb-4f62-8573-c689333222b9-kube-api-access-w58dk\") pod \"0cbeead1-c6fb-4f62-8573-c689333222b9\" (UID: \"0cbeead1-c6fb-4f62-8573-c689333222b9\") " Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.395422 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cbeead1-c6fb-4f62-8573-c689333222b9-kube-api-access-w58dk" (OuterVolumeSpecName: "kube-api-access-w58dk") pod "0cbeead1-c6fb-4f62-8573-c689333222b9" (UID: "0cbeead1-c6fb-4f62-8573-c689333222b9"). InnerVolumeSpecName "kube-api-access-w58dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.493022 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w58dk\" (UniqueName: \"kubernetes.io/projected/0cbeead1-c6fb-4f62-8573-c689333222b9-kube-api-access-w58dk\") on node \"crc\" DevicePath \"\"" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.862286 4922 generic.go:334] "Generic (PLEG): container finished" podID="0cbeead1-c6fb-4f62-8573-c689333222b9" containerID="80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c" exitCode=0 Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.862339 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-l9g49" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.862451 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-l9g49" event={"ID":"0cbeead1-c6fb-4f62-8573-c689333222b9","Type":"ContainerDied","Data":"80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c"} Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.862494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-l9g49" event={"ID":"0cbeead1-c6fb-4f62-8573-c689333222b9","Type":"ContainerDied","Data":"55cfc5f8873081b67ee69d699cba4819bf34ae325162ba4fc97f8f8ae8005b4e"} Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.862525 4922 scope.go:117] "RemoveContainer" containerID="80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.890972 4922 scope.go:117] "RemoveContainer" containerID="80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c" Sep 29 22:42:49 crc kubenswrapper[4922]: E0929 22:42:49.892787 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c\": container with ID starting with 80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c not found: ID does not exist" containerID="80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.892837 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c"} err="failed to get container status \"80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c\": rpc error: code = NotFound desc = could not find container 
\"80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c\": container with ID starting with 80dd7bef6d6a94afa2832687e20e70796cb35f79ba03d6633022ce9b33dc3f8c not found: ID does not exist" Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.916286 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-l9g49"] Sep 29 22:42:49 crc kubenswrapper[4922]: I0929 22:42:49.923534 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-l9g49"] Sep 29 22:42:50 crc kubenswrapper[4922]: I0929 22:42:50.434929 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cbeead1-c6fb-4f62-8573-c689333222b9" path="/var/lib/kubelet/pods/0cbeead1-c6fb-4f62-8573-c689333222b9/volumes" Sep 29 22:42:56 crc kubenswrapper[4922]: I0929 22:42:56.889855 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:56 crc kubenswrapper[4922]: I0929 22:42:56.890661 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:56 crc kubenswrapper[4922]: I0929 22:42:56.931145 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:56 crc kubenswrapper[4922]: I0929 22:42:56.975066 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-ts4cc" Sep 29 22:42:58 crc kubenswrapper[4922]: I0929 22:42:58.912636 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:42:58 crc kubenswrapper[4922]: I0929 22:42:58.913031 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.687515 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt"] Sep 29 22:43:02 crc kubenswrapper[4922]: E0929 22:43:02.688374 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cbeead1-c6fb-4f62-8573-c689333222b9" containerName="registry-server" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.688412 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cbeead1-c6fb-4f62-8573-c689333222b9" containerName="registry-server" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.688566 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cbeead1-c6fb-4f62-8573-c689333222b9" containerName="registry-server" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.689626 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.692192 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8chh4" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.698198 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqw6n\" (UniqueName: \"kubernetes.io/projected/9845266c-41aa-45af-86ac-cc5cc4cd163e-kube-api-access-hqw6n\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.698333 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-bundle\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.698443 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-util\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.703714 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt"] Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.800313 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqw6n\" (UniqueName: \"kubernetes.io/projected/9845266c-41aa-45af-86ac-cc5cc4cd163e-kube-api-access-hqw6n\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.800380 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-bundle\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.800447 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-util\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.800932 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-util\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.801172 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-bundle\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:02 crc kubenswrapper[4922]: I0929 22:43:02.838458 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqw6n\" (UniqueName: \"kubernetes.io/projected/9845266c-41aa-45af-86ac-cc5cc4cd163e-kube-api-access-hqw6n\") pod \"2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:03 crc kubenswrapper[4922]: I0929 22:43:03.019804 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:03 crc kubenswrapper[4922]: I0929 22:43:03.502443 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt"] Sep 29 22:43:03 crc kubenswrapper[4922]: W0929 22:43:03.509126 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9845266c_41aa_45af_86ac_cc5cc4cd163e.slice/crio-2b1d9dd84ee773fe0574e372332d9c6190f2d6885bcaba3dbd6fbb5b431f8d2f WatchSource:0}: Error finding container 2b1d9dd84ee773fe0574e372332d9c6190f2d6885bcaba3dbd6fbb5b431f8d2f: Status 404 returned error can't find the container with id 2b1d9dd84ee773fe0574e372332d9c6190f2d6885bcaba3dbd6fbb5b431f8d2f Sep 29 22:43:03 crc kubenswrapper[4922]: I0929 22:43:03.972614 4922 generic.go:334] "Generic (PLEG): container finished" podID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerID="c419577c092e92620da62409e6a921646d852d647b52a6b5d6d9ecfd49ea785d" exitCode=0 Sep 29 22:43:03 crc kubenswrapper[4922]: I0929 22:43:03.973182 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" event={"ID":"9845266c-41aa-45af-86ac-cc5cc4cd163e","Type":"ContainerDied","Data":"c419577c092e92620da62409e6a921646d852d647b52a6b5d6d9ecfd49ea785d"} Sep 29 22:43:03 crc kubenswrapper[4922]: I0929 22:43:03.973223 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" event={"ID":"9845266c-41aa-45af-86ac-cc5cc4cd163e","Type":"ContainerStarted","Data":"2b1d9dd84ee773fe0574e372332d9c6190f2d6885bcaba3dbd6fbb5b431f8d2f"} Sep 29 22:43:04 crc kubenswrapper[4922]: I0929 22:43:04.984303 4922 generic.go:334] "Generic (PLEG): container finished" podID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerID="210d3154893738efeec4002bdbd5f1a0c2b46cb2f5723dadaef976101cc9a253" exitCode=0 Sep 29 22:43:04 crc kubenswrapper[4922]: I0929 22:43:04.984357 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" event={"ID":"9845266c-41aa-45af-86ac-cc5cc4cd163e","Type":"ContainerDied","Data":"210d3154893738efeec4002bdbd5f1a0c2b46cb2f5723dadaef976101cc9a253"} Sep 29 22:43:05 crc kubenswrapper[4922]: I0929 22:43:05.997501 4922 generic.go:334] "Generic (PLEG): container finished" podID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerID="4a5b14f89355dd509aa0a310563f432c2c20a9cece374bfdba855d1cdc0c23f0" exitCode=0 Sep 29 22:43:05 crc kubenswrapper[4922]: I0929 22:43:05.997581 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" event={"ID":"9845266c-41aa-45af-86ac-cc5cc4cd163e","Type":"ContainerDied","Data":"4a5b14f89355dd509aa0a310563f432c2c20a9cece374bfdba855d1cdc0c23f0"} Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.296201 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.383656 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-util\") pod \"9845266c-41aa-45af-86ac-cc5cc4cd163e\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.383767 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-bundle\") pod \"9845266c-41aa-45af-86ac-cc5cc4cd163e\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.383855 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqw6n\" (UniqueName: \"kubernetes.io/projected/9845266c-41aa-45af-86ac-cc5cc4cd163e-kube-api-access-hqw6n\") pod \"9845266c-41aa-45af-86ac-cc5cc4cd163e\" (UID: \"9845266c-41aa-45af-86ac-cc5cc4cd163e\") " Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.386205 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-bundle" (OuterVolumeSpecName: "bundle") pod "9845266c-41aa-45af-86ac-cc5cc4cd163e" (UID: "9845266c-41aa-45af-86ac-cc5cc4cd163e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.392906 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9845266c-41aa-45af-86ac-cc5cc4cd163e-kube-api-access-hqw6n" (OuterVolumeSpecName: "kube-api-access-hqw6n") pod "9845266c-41aa-45af-86ac-cc5cc4cd163e" (UID: "9845266c-41aa-45af-86ac-cc5cc4cd163e"). InnerVolumeSpecName "kube-api-access-hqw6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.414373 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-util" (OuterVolumeSpecName: "util") pod "9845266c-41aa-45af-86ac-cc5cc4cd163e" (UID: "9845266c-41aa-45af-86ac-cc5cc4cd163e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.485318 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqw6n\" (UniqueName: \"kubernetes.io/projected/9845266c-41aa-45af-86ac-cc5cc4cd163e-kube-api-access-hqw6n\") on node \"crc\" DevicePath \"\"" Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.485355 4922 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-util\") on node \"crc\" DevicePath \"\"" Sep 29 22:43:07 crc kubenswrapper[4922]: I0929 22:43:07.485368 4922 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9845266c-41aa-45af-86ac-cc5cc4cd163e-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:43:08 crc kubenswrapper[4922]: I0929 22:43:08.018106 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" event={"ID":"9845266c-41aa-45af-86ac-cc5cc4cd163e","Type":"ContainerDied","Data":"2b1d9dd84ee773fe0574e372332d9c6190f2d6885bcaba3dbd6fbb5b431f8d2f"} Sep 29 22:43:08 crc kubenswrapper[4922]: I0929 22:43:08.018166 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b1d9dd84ee773fe0574e372332d9c6190f2d6885bcaba3dbd6fbb5b431f8d2f" Sep 29 22:43:08 crc kubenswrapper[4922]: I0929 22:43:08.018204 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.290057 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h"] Sep 29 22:43:15 crc kubenswrapper[4922]: E0929 22:43:15.291014 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerName="pull" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.291036 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerName="pull" Sep 29 22:43:15 crc kubenswrapper[4922]: E0929 22:43:15.291059 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerName="util" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.291072 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerName="util" Sep 29 22:43:15 crc kubenswrapper[4922]: E0929 22:43:15.291086 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerName="extract" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.291099 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerName="extract" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.291308 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9845266c-41aa-45af-86ac-cc5cc4cd163e" containerName="extract" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.292431 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.295041 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-7fkhr" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.304071 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7kwd\" (UniqueName: \"kubernetes.io/projected/9ab5099d-a53a-4b25-80a1-a976963dab46-kube-api-access-p7kwd\") pod \"openstack-operator-controller-operator-5856b6f896-mmf2h\" (UID: \"9ab5099d-a53a-4b25-80a1-a976963dab46\") " pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.334820 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h"] Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.405559 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7kwd\" (UniqueName: \"kubernetes.io/projected/9ab5099d-a53a-4b25-80a1-a976963dab46-kube-api-access-p7kwd\") pod \"openstack-operator-controller-operator-5856b6f896-mmf2h\" (UID: \"9ab5099d-a53a-4b25-80a1-a976963dab46\") " pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.426077 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7kwd\" (UniqueName: \"kubernetes.io/projected/9ab5099d-a53a-4b25-80a1-a976963dab46-kube-api-access-p7kwd\") pod \"openstack-operator-controller-operator-5856b6f896-mmf2h\" (UID: \"9ab5099d-a53a-4b25-80a1-a976963dab46\") " pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.611430 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" Sep 29 22:43:15 crc kubenswrapper[4922]: I0929 22:43:15.892274 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h"] Sep 29 22:43:16 crc kubenswrapper[4922]: I0929 22:43:16.080596 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" event={"ID":"9ab5099d-a53a-4b25-80a1-a976963dab46","Type":"ContainerStarted","Data":"c44ed902d8bf71ae39a83c04044b924f45e9c2aa170fc9f5d9de45ef3098aae8"} Sep 29 22:43:20 crc kubenswrapper[4922]: I0929 22:43:20.108494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" event={"ID":"9ab5099d-a53a-4b25-80a1-a976963dab46","Type":"ContainerStarted","Data":"f9b118957cbbd735056e0168deef1227f9c995a7fb2db069b8549ddc73c52f6b"} Sep 29 22:43:23 crc kubenswrapper[4922]: I0929 22:43:23.131077 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" event={"ID":"9ab5099d-a53a-4b25-80a1-a976963dab46","Type":"ContainerStarted","Data":"ba4d4f221482603b4c8a51947de6dc64a3cbf93e907296dd5f4b0daa51c1fe88"} Sep 29 22:43:23 crc kubenswrapper[4922]: I0929 22:43:23.131497 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" Sep 29 22:43:23 crc kubenswrapper[4922]: I0929 22:43:23.195482 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" podStartSLOduration=1.487856111 podStartE2EDuration="8.195453063s" podCreationTimestamp="2025-09-29 22:43:15 +0000 UTC" firstStartedPulling="2025-09-29 22:43:15.903563522 +0000 UTC m=+1000.213852335" lastFinishedPulling="2025-09-29 22:43:22.611160464 +0000 UTC m=+1006.921449287" observedRunningTime="2025-09-29 22:43:23.182286539 +0000 UTC m=+1007.492575392" watchObservedRunningTime="2025-09-29 22:43:23.195453063 +0000 UTC m=+1007.505741916" Sep 29 22:43:24 crc kubenswrapper[4922]: I0929 22:43:24.140866 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5856b6f896-mmf2h" Sep 29 22:43:28 crc kubenswrapper[4922]: I0929 22:43:28.912416 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:43:28 crc kubenswrapper[4922]: I0929 22:43:28.912783 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.385626 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.387989 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.390636 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.392081 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.394201 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-62pxm" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.406003 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.406224 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-cxsqp" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.407232 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvhsl\" (UniqueName: \"kubernetes.io/projected/7b4517a9-f6ca-4209-9c59-a862b207ee30-kube-api-access-kvhsl\") pod \"cinder-operator-controller-manager-644bddb6d8-dtws2\" (UID: \"7b4517a9-f6ca-4209-9c59-a862b207ee30\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.407306 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsbrv\" (UniqueName: \"kubernetes.io/projected/b7651f52-4ceb-4d53-b74a-dfb7da473f68-kube-api-access-gsbrv\") pod \"barbican-operator-controller-manager-6ff8b75857-bfzxs\" (UID: \"b7651f52-4ceb-4d53-b74a-dfb7da473f68\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.409698 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.438670 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.440056 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.442841 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-7898g" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.446066 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.460985 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.462033 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.469267 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-rkczw" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.469512 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.480655 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.490473 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-dkf8k" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.504842 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.515117 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v88vw\" (UniqueName: \"kubernetes.io/projected/7bb037db-f6bf-4a16-918f-153b149b9ab4-kube-api-access-v88vw\") pod \"glance-operator-controller-manager-84958c4d49-nvw97\" (UID: \"7bb037db-f6bf-4a16-918f-153b149b9ab4\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.515193 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmh9r\" (UniqueName: \"kubernetes.io/projected/44863fa1-d920-42fc-a5d2-197762fe8c37-kube-api-access-kmh9r\") pod \"heat-operator-controller-manager-5d889d78cf-trqhs\" (UID: \"44863fa1-d920-42fc-a5d2-197762fe8c37\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.515288 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7rmd\" (UniqueName: \"kubernetes.io/projected/10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89-kube-api-access-w7rmd\") pod \"designate-operator-controller-manager-84f4f7b77b-7b99q\" (UID: \"10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.515382 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvhsl\" (UniqueName: \"kubernetes.io/projected/7b4517a9-f6ca-4209-9c59-a862b207ee30-kube-api-access-kvhsl\") pod \"cinder-operator-controller-manager-644bddb6d8-dtws2\" (UID: \"7b4517a9-f6ca-4209-9c59-a862b207ee30\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.515485 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsbrv\" (UniqueName: \"kubernetes.io/projected/b7651f52-4ceb-4d53-b74a-dfb7da473f68-kube-api-access-gsbrv\") pod \"barbican-operator-controller-manager-6ff8b75857-bfzxs\" (UID: \"b7651f52-4ceb-4d53-b74a-dfb7da473f68\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.551709 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvhsl\" (UniqueName: \"kubernetes.io/projected/7b4517a9-f6ca-4209-9c59-a862b207ee30-kube-api-access-kvhsl\") pod \"cinder-operator-controller-manager-644bddb6d8-dtws2\" (UID: \"7b4517a9-f6ca-4209-9c59-a862b207ee30\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.561738 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.562354 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsbrv\" (UniqueName: \"kubernetes.io/projected/b7651f52-4ceb-4d53-b74a-dfb7da473f68-kube-api-access-gsbrv\") pod \"barbican-operator-controller-manager-6ff8b75857-bfzxs\" (UID: \"b7651f52-4ceb-4d53-b74a-dfb7da473f68\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.584896 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.586094 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.590590 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-tptbt" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.595518 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.597346 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.605322 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-b5d8j" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.605664 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.612969 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.614679 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.618011 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-thvsz" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.619030 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7rmd\" (UniqueName: \"kubernetes.io/projected/10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89-kube-api-access-w7rmd\") pod \"designate-operator-controller-manager-84f4f7b77b-7b99q\" (UID: \"10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.619128 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88vw\" (UniqueName: \"kubernetes.io/projected/7bb037db-f6bf-4a16-918f-153b149b9ab4-kube-api-access-v88vw\") pod \"glance-operator-controller-manager-84958c4d49-nvw97\" (UID: \"7bb037db-f6bf-4a16-918f-153b149b9ab4\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.619163 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmh9r\" (UniqueName: \"kubernetes.io/projected/44863fa1-d920-42fc-a5d2-197762fe8c37-kube-api-access-kmh9r\") pod \"heat-operator-controller-manager-5d889d78cf-trqhs\" (UID: \"44863fa1-d920-42fc-a5d2-197762fe8c37\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.632807 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.646248 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.646560 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7rmd\" (UniqueName: \"kubernetes.io/projected/10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89-kube-api-access-w7rmd\") pod \"designate-operator-controller-manager-84f4f7b77b-7b99q\" (UID: \"10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.664034 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmh9r\" (UniqueName: \"kubernetes.io/projected/44863fa1-d920-42fc-a5d2-197762fe8c37-kube-api-access-kmh9r\") pod \"heat-operator-controller-manager-5d889d78cf-trqhs\" (UID: \"44863fa1-d920-42fc-a5d2-197762fe8c37\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.664848 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88vw\" (UniqueName: \"kubernetes.io/projected/7bb037db-f6bf-4a16-918f-153b149b9ab4-kube-api-access-v88vw\") pod \"glance-operator-controller-manager-84958c4d49-nvw97\" (UID: \"7bb037db-f6bf-4a16-918f-153b149b9ab4\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.666562 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.670222 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.672035 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.676839 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.678529 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.681108 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-jp57k" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.681895 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-49rdw" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.692422 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.693699 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.696244 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-ckv6c" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.697462 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.710425 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.715772 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.719764 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a7fd9019-83a0-41a0-8380-fac36130cb3d-cert\") pod \"infra-operator-controller-manager-7d857cc749-7mtt9\" (UID: \"a7fd9019-83a0-41a0-8380-fac36130cb3d\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.719812 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwqm7\" (UniqueName: \"kubernetes.io/projected/a7fd9019-83a0-41a0-8380-fac36130cb3d-kube-api-access-rwqm7\") pod \"infra-operator-controller-manager-7d857cc749-7mtt9\" (UID: \"a7fd9019-83a0-41a0-8380-fac36130cb3d\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.719862 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzpfd\" (UniqueName: \"kubernetes.io/projected/d6777dc8-0849-4744-bc01-7f790064dcfe-kube-api-access-jzpfd\") pod \"ironic-operator-controller-manager-7975b88857-dpnvq\" (UID: \"d6777dc8-0849-4744-bc01-7f790064dcfe\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.719896 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfcrw\" (UniqueName: \"kubernetes.io/projected/2fc57cef-6bbd-4925-82a9-0efb9622aa81-kube-api-access-wfcrw\") pod \"horizon-operator-controller-manager-9f4696d94-tplbn\" (UID: \"2fc57cef-6bbd-4925-82a9-0efb9622aa81\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.719976 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.728756 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.729953 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.730853 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.743854 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.744871 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.752605 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.764189 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-nz86p" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.764494 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-c2bq6" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.784712 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.791204 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.804615 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.818267 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.819729 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.820950 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823166 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfwv7\" (UniqueName: \"kubernetes.io/projected/623e3bae-ed71-479d-8ea3-ca0ca035a8a3-kube-api-access-kfwv7\") pod \"manila-operator-controller-manager-6d68dbc695-r2mxk\" (UID: \"623e3bae-ed71-479d-8ea3-ca0ca035a8a3\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823207 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a7fd9019-83a0-41a0-8380-fac36130cb3d-cert\") pod \"infra-operator-controller-manager-7d857cc749-7mtt9\" (UID: \"a7fd9019-83a0-41a0-8380-fac36130cb3d\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823234 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw7l9\" (UniqueName: \"kubernetes.io/projected/ce8f2c94-c958-4874-a2b8-9b3ee2ca943f-kube-api-access-vw7l9\") pod \"mariadb-operator-controller-manager-88c7-dwpq7\" (UID: \"ce8f2c94-c958-4874-a2b8-9b3ee2ca943f\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823255 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwqm7\" (UniqueName: \"kubernetes.io/projected/a7fd9019-83a0-41a0-8380-fac36130cb3d-kube-api-access-rwqm7\") pod \"infra-operator-controller-manager-7d857cc749-7mtt9\" (UID: \"a7fd9019-83a0-41a0-8380-fac36130cb3d\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823292 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj54m\" (UniqueName: \"kubernetes.io/projected/4347835b-b3fa-40b5-b227-43c9da18c8d1-kube-api-access-dj54m\") pod \"keystone-operator-controller-manager-5bd55b4bff-cx7lp\" (UID: \"4347835b-b3fa-40b5-b227-43c9da18c8d1\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823323 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzpfd\" (UniqueName: \"kubernetes.io/projected/d6777dc8-0849-4744-bc01-7f790064dcfe-kube-api-access-jzpfd\") pod \"ironic-operator-controller-manager-7975b88857-dpnvq\" (UID: \"d6777dc8-0849-4744-bc01-7f790064dcfe\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823356 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfcrw\" (UniqueName: \"kubernetes.io/projected/2fc57cef-6bbd-4925-82a9-0efb9622aa81-kube-api-access-wfcrw\") pod \"horizon-operator-controller-manager-9f4696d94-tplbn\" (UID: \"2fc57cef-6bbd-4925-82a9-0efb9622aa81\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.823384 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vkmq\" (UniqueName: 
\"kubernetes.io/projected/851f4d4d-a27d-4fb8-9d26-9ea61e2eb423-kube-api-access-5vkmq\") pod \"neutron-operator-controller-manager-64d7b59854-vxc2x\" (UID: \"851f4d4d-a27d-4fb8-9d26-9ea61e2eb423\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" Sep 29 22:43:41 crc kubenswrapper[4922]: E0929 22:43:41.824841 4922 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.824882 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-xgrtx" Sep 29 22:43:41 crc kubenswrapper[4922]: E0929 22:43:41.824914 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a7fd9019-83a0-41a0-8380-fac36130cb3d-cert podName:a7fd9019-83a0-41a0-8380-fac36130cb3d nodeName:}" failed. No retries permitted until 2025-09-29 22:43:42.324894417 +0000 UTC m=+1026.635183230 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a7fd9019-83a0-41a0-8380-fac36130cb3d-cert") pod "infra-operator-controller-manager-7d857cc749-7mtt9" (UID: "a7fd9019-83a0-41a0-8380-fac36130cb3d") : secret "infra-operator-webhook-server-cert" not found Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.842254 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.843390 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.850979 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-tjgnm" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.851175 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.865368 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.877488 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.896511 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzpfd\" (UniqueName: \"kubernetes.io/projected/d6777dc8-0849-4744-bc01-7f790064dcfe-kube-api-access-jzpfd\") pod \"ironic-operator-controller-manager-7975b88857-dpnvq\" (UID: \"d6777dc8-0849-4744-bc01-7f790064dcfe\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.897136 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.899678 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfcrw\" (UniqueName: \"kubernetes.io/projected/2fc57cef-6bbd-4925-82a9-0efb9622aa81-kube-api-access-wfcrw\") pod 
\"horizon-operator-controller-manager-9f4696d94-tplbn\" (UID: \"2fc57cef-6bbd-4925-82a9-0efb9622aa81\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.900485 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.914900 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.916481 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-kjdlm" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.936267 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv"] Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.937198 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwqm7\" (UniqueName: \"kubernetes.io/projected/a7fd9019-83a0-41a0-8380-fac36130cb3d-kube-api-access-rwqm7\") pod \"infra-operator-controller-manager-7d857cc749-7mtt9\" (UID: \"a7fd9019-83a0-41a0-8380-fac36130cb3d\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943460 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vcc4\" (UniqueName: \"kubernetes.io/projected/87c9a6b1-e1ea-41dc-b77f-67b22bc39517-kube-api-access-2vcc4\") pod \"nova-operator-controller-manager-c7c776c96-czjxj\" (UID: \"87c9a6b1-e1ea-41dc-b77f-67b22bc39517\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943517 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj54m\" (UniqueName: \"kubernetes.io/projected/4347835b-b3fa-40b5-b227-43c9da18c8d1-kube-api-access-dj54m\") pod \"keystone-operator-controller-manager-5bd55b4bff-cx7lp\" (UID: \"4347835b-b3fa-40b5-b227-43c9da18c8d1\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943597 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943656 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vkmq\" (UniqueName: \"kubernetes.io/projected/851f4d4d-a27d-4fb8-9d26-9ea61e2eb423-kube-api-access-5vkmq\") pod \"neutron-operator-controller-manager-64d7b59854-vxc2x\" (UID: \"851f4d4d-a27d-4fb8-9d26-9ea61e2eb423\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943682 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgqfm\" (UniqueName: 
\"kubernetes.io/projected/e2481ff3-5842-4351-b0fc-71fecd911258-kube-api-access-dgqfm\") pod \"octavia-operator-controller-manager-76fcc6dc7c-zhhc4\" (UID: \"e2481ff3-5842-4351-b0fc-71fecd911258\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943705 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npxjp\" (UniqueName: \"kubernetes.io/projected/2dc7bdb9-eab6-4497-8888-adadebf30b1a-kube-api-access-npxjp\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943744 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfwv7\" (UniqueName: \"kubernetes.io/projected/623e3bae-ed71-479d-8ea3-ca0ca035a8a3-kube-api-access-kfwv7\") pod \"manila-operator-controller-manager-6d68dbc695-r2mxk\" (UID: \"623e3bae-ed71-479d-8ea3-ca0ca035a8a3\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" Sep 29 22:43:41 crc kubenswrapper[4922]: I0929 22:43:41.943780 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw7l9\" (UniqueName: \"kubernetes.io/projected/ce8f2c94-c958-4874-a2b8-9b3ee2ca943f-kube-api-access-vw7l9\") pod \"mariadb-operator-controller-manager-88c7-dwpq7\" (UID: \"ce8f2c94-c958-4874-a2b8-9b3ee2ca943f\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.004174 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfwv7\" (UniqueName: \"kubernetes.io/projected/623e3bae-ed71-479d-8ea3-ca0ca035a8a3-kube-api-access-kfwv7\") pod \"manila-operator-controller-manager-6d68dbc695-r2mxk\" (UID: \"623e3bae-ed71-479d-8ea3-ca0ca035a8a3\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.004691 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw7l9\" (UniqueName: \"kubernetes.io/projected/ce8f2c94-c958-4874-a2b8-9b3ee2ca943f-kube-api-access-vw7l9\") pod \"mariadb-operator-controller-manager-88c7-dwpq7\" (UID: \"ce8f2c94-c958-4874-a2b8-9b3ee2ca943f\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.009460 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj54m\" (UniqueName: \"kubernetes.io/projected/4347835b-b3fa-40b5-b227-43c9da18c8d1-kube-api-access-dj54m\") pod \"keystone-operator-controller-manager-5bd55b4bff-cx7lp\" (UID: \"4347835b-b3fa-40b5-b227-43c9da18c8d1\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.012057 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vkmq\" (UniqueName: \"kubernetes.io/projected/851f4d4d-a27d-4fb8-9d26-9ea61e2eb423-kube-api-access-5vkmq\") pod \"neutron-operator-controller-manager-64d7b59854-vxc2x\" (UID: \"851f4d4d-a27d-4fb8-9d26-9ea61e2eb423\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.020657 4922 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.025809 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.045083 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.045161 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqmt7\" (UniqueName: \"kubernetes.io/projected/9eb5c0db-802b-4f80-ac48-9f1e75e3cebb-kube-api-access-bqmt7\") pod \"ovn-operator-controller-manager-9976ff44c-qxlqv\" (UID: \"9eb5c0db-802b-4f80-ac48-9f1e75e3cebb\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.045204 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgqfm\" (UniqueName: \"kubernetes.io/projected/e2481ff3-5842-4351-b0fc-71fecd911258-kube-api-access-dgqfm\") pod \"octavia-operator-controller-manager-76fcc6dc7c-zhhc4\" (UID: \"e2481ff3-5842-4351-b0fc-71fecd911258\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.045225 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npxjp\" (UniqueName: \"kubernetes.io/projected/2dc7bdb9-eab6-4497-8888-adadebf30b1a-kube-api-access-npxjp\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.045278 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vcc4\" (UniqueName: \"kubernetes.io/projected/87c9a6b1-e1ea-41dc-b77f-67b22bc39517-kube-api-access-2vcc4\") pod \"nova-operator-controller-manager-c7c776c96-czjxj\" (UID: \"87c9a6b1-e1ea-41dc-b77f-67b22bc39517\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" Sep 29 22:43:42 crc kubenswrapper[4922]: E0929 22:43:42.045664 4922 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 22:43:42 crc kubenswrapper[4922]: E0929 22:43:42.045737 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert podName:2dc7bdb9-eab6-4497-8888-adadebf30b1a nodeName:}" failed. No retries permitted until 2025-09-29 22:43:42.545716646 +0000 UTC m=+1026.856005459 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-6txmj" (UID: "2dc7bdb9-eab6-4497-8888-adadebf30b1a") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.046791 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.088771 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npxjp\" (UniqueName: \"kubernetes.io/projected/2dc7bdb9-eab6-4497-8888-adadebf30b1a-kube-api-access-npxjp\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.157760 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqmt7\" (UniqueName: \"kubernetes.io/projected/9eb5c0db-802b-4f80-ac48-9f1e75e3cebb-kube-api-access-bqmt7\") pod \"ovn-operator-controller-manager-9976ff44c-qxlqv\" (UID: \"9eb5c0db-802b-4f80-ac48-9f1e75e3cebb\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.158728 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vcc4\" (UniqueName: \"kubernetes.io/projected/87c9a6b1-e1ea-41dc-b77f-67b22bc39517-kube-api-access-2vcc4\") pod \"nova-operator-controller-manager-c7c776c96-czjxj\" (UID: \"87c9a6b1-e1ea-41dc-b77f-67b22bc39517\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.158816 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.164809 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.167137 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.169812 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.184609 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-nwntv" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.198241 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.204751 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqmt7\" (UniqueName: \"kubernetes.io/projected/9eb5c0db-802b-4f80-ac48-9f1e75e3cebb-kube-api-access-bqmt7\") pod \"ovn-operator-controller-manager-9976ff44c-qxlqv\" (UID: \"9eb5c0db-802b-4f80-ac48-9f1e75e3cebb\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.207555 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgqfm\" (UniqueName: \"kubernetes.io/projected/e2481ff3-5842-4351-b0fc-71fecd911258-kube-api-access-dgqfm\") pod \"octavia-operator-controller-manager-76fcc6dc7c-zhhc4\" (UID: \"e2481ff3-5842-4351-b0fc-71fecd911258\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.219530 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.240935 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.243434 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.243645 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.253231 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.285470 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.285810 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-pxjvw" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.286918 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-bmhkf" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.287169 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njpvd\" (UniqueName: \"kubernetes.io/projected/55d075ba-936a-4e25-ac68-01ae1a6a0a33-kube-api-access-njpvd\") pod \"placement-operator-controller-manager-589c58c6c-mcrvk\" (UID: \"55d075ba-936a-4e25-ac68-01ae1a6a0a33\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.287465 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.295482 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.300422 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.302302 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.309722 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-5nk9l" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.314086 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.345665 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.347150 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.353109 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-7fs5w" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.371786 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.405810 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.409725 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a7fd9019-83a0-41a0-8380-fac36130cb3d-cert\") pod \"infra-operator-controller-manager-7d857cc749-7mtt9\" (UID: \"a7fd9019-83a0-41a0-8380-fac36130cb3d\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.409796 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zg6r\" (UniqueName: \"kubernetes.io/projected/5d6e0d8b-b5bf-49eb-8619-8f60d4177c32-kube-api-access-5zg6r\") pod \"swift-operator-controller-manager-bc7dc7bd9-dq4dx\" (UID: \"5d6e0d8b-b5bf-49eb-8619-8f60d4177c32\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.409843 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn8dz\" (UniqueName: \"kubernetes.io/projected/846b5189-20cf-414a-b682-a2bbc6e184cf-kube-api-access-cn8dz\") pod \"test-operator-controller-manager-f66b554c6-8kgtj\" (UID: \"846b5189-20cf-414a-b682-a2bbc6e184cf\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.409877 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2t2h\" (UniqueName: \"kubernetes.io/projected/f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5-kube-api-access-m2t2h\") pod \"telemetry-operator-controller-manager-b8d54b5d7-wg8cr\" (UID: \"f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.409910 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njpvd\" (UniqueName: \"kubernetes.io/projected/55d075ba-936a-4e25-ac68-01ae1a6a0a33-kube-api-access-njpvd\") pod \"placement-operator-controller-manager-589c58c6c-mcrvk\" (UID: \"55d075ba-936a-4e25-ac68-01ae1a6a0a33\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.415082 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a7fd9019-83a0-41a0-8380-fac36130cb3d-cert\") pod \"infra-operator-controller-manager-7d857cc749-7mtt9\" (UID: \"a7fd9019-83a0-41a0-8380-fac36130cb3d\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.449099 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njpvd\" (UniqueName: \"kubernetes.io/projected/55d075ba-936a-4e25-ac68-01ae1a6a0a33-kube-api-access-njpvd\") pod \"placement-operator-controller-manager-589c58c6c-mcrvk\" (UID: \"55d075ba-936a-4e25-ac68-01ae1a6a0a33\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.458641 4922 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.460677 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.464423 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.469928 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-n6lx9" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.470173 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.490525 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.491715 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.494593 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-kpnwd" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.502467 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.508846 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.510633 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zg6r\" (UniqueName: \"kubernetes.io/projected/5d6e0d8b-b5bf-49eb-8619-8f60d4177c32-kube-api-access-5zg6r\") pod \"swift-operator-controller-manager-bc7dc7bd9-dq4dx\" (UID: \"5d6e0d8b-b5bf-49eb-8619-8f60d4177c32\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.510751 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn8dz\" (UniqueName: \"kubernetes.io/projected/846b5189-20cf-414a-b682-a2bbc6e184cf-kube-api-access-cn8dz\") pod \"test-operator-controller-manager-f66b554c6-8kgtj\" (UID: \"846b5189-20cf-414a-b682-a2bbc6e184cf\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.510844 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2t2h\" (UniqueName: \"kubernetes.io/projected/f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5-kube-api-access-m2t2h\") pod \"telemetry-operator-controller-manager-b8d54b5d7-wg8cr\" (UID: \"f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.511265 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbsd4\" (UniqueName: \"kubernetes.io/projected/7171ed90-0002-4a34-a417-39a2645e8566-kube-api-access-rbsd4\") pod \"watcher-operator-controller-manager-76669f99c-lr8wj\" (UID: \"7171ed90-0002-4a34-a417-39a2645e8566\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.522806 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.541350 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs"] Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.565837 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2t2h\" (UniqueName: \"kubernetes.io/projected/f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5-kube-api-access-m2t2h\") pod \"telemetry-operator-controller-manager-b8d54b5d7-wg8cr\" (UID: \"f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.567262 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn8dz\" (UniqueName: \"kubernetes.io/projected/846b5189-20cf-414a-b682-a2bbc6e184cf-kube-api-access-cn8dz\") pod \"test-operator-controller-manager-f66b554c6-8kgtj\" (UID: \"846b5189-20cf-414a-b682-a2bbc6e184cf\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.583591 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zg6r\" (UniqueName: \"kubernetes.io/projected/5d6e0d8b-b5bf-49eb-8619-8f60d4177c32-kube-api-access-5zg6r\") pod \"swift-operator-controller-manager-bc7dc7bd9-dq4dx\" (UID: \"5d6e0d8b-b5bf-49eb-8619-8f60d4177c32\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.614540 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/353ae411-6209-4172-94f7-0bad05ab725b-cert\") pod \"openstack-operator-controller-manager-5b58fb7c85-tzf2t\" (UID: \"353ae411-6209-4172-94f7-0bad05ab725b\") " pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.615048 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrk9v\" (UniqueName: \"kubernetes.io/projected/353ae411-6209-4172-94f7-0bad05ab725b-kube-api-access-lrk9v\") pod \"openstack-operator-controller-manager-5b58fb7c85-tzf2t\" (UID: \"353ae411-6209-4172-94f7-0bad05ab725b\") " pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.615144 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5pt9\" (UniqueName: \"kubernetes.io/projected/1e3f1a04-5e07-4c81-93dc-beca7a598caf-kube-api-access-j5pt9\") pod \"rabbitmq-cluster-operator-manager-79d8469568-8blrf\" (UID: \"1e3f1a04-5e07-4c81-93dc-beca7a598caf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.615281 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbsd4\" (UniqueName: \"kubernetes.io/projected/7171ed90-0002-4a34-a417-39a2645e8566-kube-api-access-rbsd4\") pod \"watcher-operator-controller-manager-76669f99c-lr8wj\" (UID: \"7171ed90-0002-4a34-a417-39a2645e8566\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" Sep 29 22:43:42 crc kubenswrapper[4922]: 
I0929 22:43:42.615737 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:42 crc kubenswrapper[4922]: E0929 22:43:42.616019 4922 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 22:43:42 crc kubenswrapper[4922]: E0929 22:43:42.616095 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert podName:2dc7bdb9-eab6-4497-8888-adadebf30b1a nodeName:}" failed. No retries permitted until 2025-09-29 22:43:43.616075042 +0000 UTC m=+1027.926363855 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-6txmj" (UID: "2dc7bdb9-eab6-4497-8888-adadebf30b1a") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.635714 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbsd4\" (UniqueName: \"kubernetes.io/projected/7171ed90-0002-4a34-a417-39a2645e8566-kube-api-access-rbsd4\") pod \"watcher-operator-controller-manager-76669f99c-lr8wj\" (UID: \"7171ed90-0002-4a34-a417-39a2645e8566\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.678165 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.706031 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.716889 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/353ae411-6209-4172-94f7-0bad05ab725b-cert\") pod \"openstack-operator-controller-manager-5b58fb7c85-tzf2t\" (UID: \"353ae411-6209-4172-94f7-0bad05ab725b\") " pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.716953 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrk9v\" (UniqueName: \"kubernetes.io/projected/353ae411-6209-4172-94f7-0bad05ab725b-kube-api-access-lrk9v\") pod \"openstack-operator-controller-manager-5b58fb7c85-tzf2t\" (UID: \"353ae411-6209-4172-94f7-0bad05ab725b\") " pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.716984 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5pt9\" (UniqueName: \"kubernetes.io/projected/1e3f1a04-5e07-4c81-93dc-beca7a598caf-kube-api-access-j5pt9\") pod \"rabbitmq-cluster-operator-manager-79d8469568-8blrf\" (UID: \"1e3f1a04-5e07-4c81-93dc-beca7a598caf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" Sep 29 22:43:42 crc kubenswrapper[4922]: E0929 22:43:42.717659 4922 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 22:43:42 crc kubenswrapper[4922]: E0929 22:43:42.717776 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/353ae411-6209-4172-94f7-0bad05ab725b-cert podName:353ae411-6209-4172-94f7-0bad05ab725b nodeName:}" failed. No retries permitted until 2025-09-29 22:43:43.217743383 +0000 UTC m=+1027.528032196 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/353ae411-6209-4172-94f7-0bad05ab725b-cert") pod "openstack-operator-controller-manager-5b58fb7c85-tzf2t" (UID: "353ae411-6209-4172-94f7-0bad05ab725b") : secret "webhook-server-cert" not found Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.740250 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.749357 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5pt9\" (UniqueName: \"kubernetes.io/projected/1e3f1a04-5e07-4c81-93dc-beca7a598caf-kube-api-access-j5pt9\") pod \"rabbitmq-cluster-operator-manager-79d8469568-8blrf\" (UID: \"1e3f1a04-5e07-4c81-93dc-beca7a598caf\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.753153 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrk9v\" (UniqueName: \"kubernetes.io/projected/353ae411-6209-4172-94f7-0bad05ab725b-kube-api-access-lrk9v\") pod \"openstack-operator-controller-manager-5b58fb7c85-tzf2t\" (UID: \"353ae411-6209-4172-94f7-0bad05ab725b\") " pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.778726 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.784626 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" Sep 29 22:43:42 crc kubenswrapper[4922]: I0929 22:43:42.940221 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.071338 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.228921 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/353ae411-6209-4172-94f7-0bad05ab725b-cert\") pod \"openstack-operator-controller-manager-5b58fb7c85-tzf2t\" (UID: \"353ae411-6209-4172-94f7-0bad05ab725b\") " pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.235228 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/353ae411-6209-4172-94f7-0bad05ab725b-cert\") pod \"openstack-operator-controller-manager-5b58fb7c85-tzf2t\" (UID: \"353ae411-6209-4172-94f7-0bad05ab725b\") " pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.336197 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" event={"ID":"7bb037db-f6bf-4a16-918f-153b149b9ab4","Type":"ContainerStarted","Data":"6f38a729c3fb0555f7d29c3577d201f0413ceaf4dfd1ee0f39fb9c6527e397fd"} Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.337569 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" event={"ID":"b7651f52-4ceb-4d53-b74a-dfb7da473f68","Type":"ContainerStarted","Data":"e3d188bad0afd481a5e13b9541db3fa02af33aaceedf97cab7bebc8371d5b491"} Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.339184 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" event={"ID":"7b4517a9-f6ca-4209-9c59-a862b207ee30","Type":"ContainerStarted","Data":"b1a0840ebb07f62a3b1c289d2e9e45ac2c95be8c9554328be46f1f0fe48864a3"} Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.410083 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.426344 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.457838 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.467706 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.482914 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn"] Sep 29 22:43:43 crc kubenswrapper[4922]: W0929 22:43:43.485168 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fc57cef_6bbd_4925_82a9_0efb9622aa81.slice/crio-6c9ab712619886cd67a60dabcf03188841e79cddaf847f795799a8b5662d807a WatchSource:0}: Error finding container 6c9ab712619886cd67a60dabcf03188841e79cddaf847f795799a8b5662d807a: Status 404 returned error can't find the container with id 6c9ab712619886cd67a60dabcf03188841e79cddaf847f795799a8b5662d807a Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.489669 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj"] Sep 29 22:43:43 crc kubenswrapper[4922]: W0929 22:43:43.493481 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7fd9019_83a0_41a0_8380_fac36130cb3d.slice/crio-967b3c0132615585fb7efb1d1b3fd6ad3e36e74457801472838711f2d9ffdc42 WatchSource:0}: Error finding container 967b3c0132615585fb7efb1d1b3fd6ad3e36e74457801472838711f2d9ffdc42: Status 404 returned error can't find the container with id 967b3c0132615585fb7efb1d1b3fd6ad3e36e74457801472838711f2d9ffdc42 Sep 29 22:43:43 crc kubenswrapper[4922]: W0929 22:43:43.493726 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4347835b_b3fa_40b5_b227_43c9da18c8d1.slice/crio-f36cd456d588416786f498b5b543638cc3079ebf7d98413dce5d212da111bf23 WatchSource:0}: Error finding container f36cd456d588416786f498b5b543638cc3079ebf7d98413dce5d212da111bf23: Status 404 returned error can't find the container with id f36cd456d588416786f498b5b543638cc3079ebf7d98413dce5d212da111bf23 Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.499624 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.506006 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs"] Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.512461 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wfcrw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-9f4696d94-tplbn_openstack-operators(2fc57cef-6bbd-4925-82a9-0efb9622aa81): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.517526 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9"] Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.523132 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bqmt7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9976ff44c-qxlqv_openstack-operators(9eb5c0db-802b-4f80-ac48-9f1e75e3cebb): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.526646 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.533486 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.536157 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.540199 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.671707 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.681666 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2dc7bdb9-eab6-4497-8888-adadebf30b1a-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-6txmj\" (UID: \"2dc7bdb9-eab6-4497-8888-adadebf30b1a\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.727852 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf"] Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.749111 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" podUID="2fc57cef-6bbd-4925-82a9-0efb9622aa81" Sep 29 22:43:43 crc kubenswrapper[4922]: W0929 22:43:43.756139 4922 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55d075ba_936a_4e25_ac68_01ae1a6a0a33.slice/crio-27ba167d8eaad4e2c1ecccf7e41e7f1050b3605c2e45156545d96ee3e004f0d2 WatchSource:0}: Error finding container 27ba167d8eaad4e2c1ecccf7e41e7f1050b3605c2e45156545d96ee3e004f0d2: Status 404 returned error can't find the container with id 27ba167d8eaad4e2c1ecccf7e41e7f1050b3605c2e45156545d96ee3e004f0d2 Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.756545 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.766933 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk"] Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.770330 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" podUID="9eb5c0db-802b-4f80-ac48-9f1e75e3cebb" Sep 29 22:43:43 crc kubenswrapper[4922]: W0929 22:43:43.770569 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7171ed90_0002_4a34_a417_39a2645e8566.slice/crio-230e6fa7fc1fedd191f5b05d1acc19f001c2c6ee1bded71dc4ae2b59c1100de6 WatchSource:0}: Error finding container 230e6fa7fc1fedd191f5b05d1acc19f001c2c6ee1bded71dc4ae2b59c1100de6: Status 404 returned error can't find the container with id 230e6fa7fc1fedd191f5b05d1acc19f001c2c6ee1bded71dc4ae2b59c1100de6 Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.772460 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj"] Sep 29 22:43:43 crc kubenswrapper[4922]: W0929 22:43:43.777775 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod846b5189_20cf_414a_b682_a2bbc6e184cf.slice/crio-7b6a9e3d81118a8c3b8cf1a7cc1b84bc9838c727aee4f9d5efb9a97d497a9050 WatchSource:0}: Error finding container 7b6a9e3d81118a8c3b8cf1a7cc1b84bc9838c727aee4f9d5efb9a97d497a9050: Status 404 returned error can't find the container with id 7b6a9e3d81118a8c3b8cf1a7cc1b84bc9838c727aee4f9d5efb9a97d497a9050 Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.779567 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.783782 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx"] Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.787288 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t"] Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.790019 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rbsd4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-76669f99c-lr8wj_openstack-operators(7171ed90-0002-4a34-a417-39a2645e8566): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.792360 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5zg6r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-dq4dx_openstack-operators(5d6e0d8b-b5bf-49eb-8619-8f60d4177c32): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.792694 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j5pt9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-8blrf_openstack-operators(1e3f1a04-5e07-4c81-93dc-beca7a598caf): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.793423 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cn8dz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-8kgtj_openstack-operators(846b5189-20cf-414a-b682-a2bbc6e184cf): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.793830 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" podUID="1e3f1a04-5e07-4c81-93dc-beca7a598caf" Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.795468 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m2t2h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-b8d54b5d7-wg8cr_openstack-operators(f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 22:43:43 crc kubenswrapper[4922]: I0929 22:43:43.814187 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.937589 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" podUID="7171ed90-0002-4a34-a417-39a2645e8566" Sep 29 22:43:43 crc kubenswrapper[4922]: E0929 22:43:43.941205 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" podUID="5d6e0d8b-b5bf-49eb-8619-8f60d4177c32" Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.072163 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" podUID="f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5" Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.146853 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" podUID="846b5189-20cf-414a-b682-a2bbc6e184cf" Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.360709 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" event={"ID":"e2481ff3-5842-4351-b0fc-71fecd911258","Type":"ContainerStarted","Data":"3d5ab9e1af6532f2ba60bb6794aabbe513edc92ee8bd31195f147c068a1dd816"} 
Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.364311 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" event={"ID":"623e3bae-ed71-479d-8ea3-ca0ca035a8a3","Type":"ContainerStarted","Data":"550410eb644c9d566ff3a0dcfaa903c8182017d9607fabce28551ff39a7cb8cf"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.395530 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" event={"ID":"55d075ba-936a-4e25-ac68-01ae1a6a0a33","Type":"ContainerStarted","Data":"27ba167d8eaad4e2c1ecccf7e41e7f1050b3605c2e45156545d96ee3e004f0d2"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.408341 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" event={"ID":"2fc57cef-6bbd-4925-82a9-0efb9622aa81","Type":"ContainerStarted","Data":"2de850a2fa33e044815fffac6fde407cf0ea60ff2e6df3388377c38879119bfc"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.408386 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" event={"ID":"2fc57cef-6bbd-4925-82a9-0efb9622aa81","Type":"ContainerStarted","Data":"6c9ab712619886cd67a60dabcf03188841e79cddaf847f795799a8b5662d807a"} Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.411748 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" podUID="2fc57cef-6bbd-4925-82a9-0efb9622aa81" Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.436786 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" event={"ID":"ce8f2c94-c958-4874-a2b8-9b3ee2ca943f","Type":"ContainerStarted","Data":"d4002d8683519c3690c396623dab6e87b21d4067005f5f6e2816162f925ab8b7"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.436842 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" event={"ID":"10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89","Type":"ContainerStarted","Data":"e47e19f0998ded0f2c190af4959668b86379ae75ce0cffc0f57044441bf3f3db"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.438300 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" event={"ID":"4347835b-b3fa-40b5-b227-43c9da18c8d1","Type":"ContainerStarted","Data":"f36cd456d588416786f498b5b543638cc3079ebf7d98413dce5d212da111bf23"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.442990 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" event={"ID":"1e3f1a04-5e07-4c81-93dc-beca7a598caf","Type":"ContainerStarted","Data":"7af10bdfd1a278e2407dcdcba8896cd916ede382dfd908f2fe505b5eb803f549"} Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.462503 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" podUID="1e3f1a04-5e07-4c81-93dc-beca7a598caf" Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.463985 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" event={"ID":"a7fd9019-83a0-41a0-8380-fac36130cb3d","Type":"ContainerStarted","Data":"967b3c0132615585fb7efb1d1b3fd6ad3e36e74457801472838711f2d9ffdc42"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.467782 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" event={"ID":"9eb5c0db-802b-4f80-ac48-9f1e75e3cebb","Type":"ContainerStarted","Data":"a25931d31a604beb1a76863dd0bb1738b1738ddaa2ae1a94c8e10dd6a7ef4c5a"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.467809 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" event={"ID":"9eb5c0db-802b-4f80-ac48-9f1e75e3cebb","Type":"ContainerStarted","Data":"30621d3c61459df658d77c481086d131c63e768e17a30659affc712c104f3df1"} Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.473844 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" podUID="9eb5c0db-802b-4f80-ac48-9f1e75e3cebb" Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.485355 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" event={"ID":"87c9a6b1-e1ea-41dc-b77f-67b22bc39517","Type":"ContainerStarted","Data":"a1cc408e7bda40d1365e1b1ec633b0bea5ef2d44bc7b2f8edaf7270d4c37820c"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.492482 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" event={"ID":"44863fa1-d920-42fc-a5d2-197762fe8c37","Type":"ContainerStarted","Data":"c5a621c6b601b6c8a113bf9ae5c9a9fbb5200ee1799a8a0ea6e480fbe94f7fab"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.494183 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" event={"ID":"353ae411-6209-4172-94f7-0bad05ab725b","Type":"ContainerStarted","Data":"bb54e4ab57edf68adda7415bab8462bf6d5dad1f70d184b259926aad909e6721"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.494211 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" event={"ID":"353ae411-6209-4172-94f7-0bad05ab725b","Type":"ContainerStarted","Data":"06b6986c0ae99487ea1862f0d0e2f6c1835636cc7baf2d0ce2def1ca7ba3d043"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.494222 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" event={"ID":"353ae411-6209-4172-94f7-0bad05ab725b","Type":"ContainerStarted","Data":"b939e5d0aea38f56de17a1a2ebf2997f5112d28aaec3c6c36b414aaca8fc328c"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 
22:43:44.495214 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.530296 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj"] Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.535885 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" event={"ID":"846b5189-20cf-414a-b682-a2bbc6e184cf","Type":"ContainerStarted","Data":"c080a7d8bb819f1117151f6f1239f2823b17d68f8746a83111b08934b52d19b3"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.535948 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" event={"ID":"846b5189-20cf-414a-b682-a2bbc6e184cf","Type":"ContainerStarted","Data":"7b6a9e3d81118a8c3b8cf1a7cc1b84bc9838c727aee4f9d5efb9a97d497a9050"} Sep 29 22:43:44 crc kubenswrapper[4922]: W0929 22:43:44.538249 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2dc7bdb9_eab6_4497_8888_adadebf30b1a.slice/crio-cff860b3fac4600f71d208b4ad237e3528e095c0f08cea06b2f6971384f7cc53 WatchSource:0}: Error finding container cff860b3fac4600f71d208b4ad237e3528e095c0f08cea06b2f6971384f7cc53: Status 404 returned error can't find the container with id cff860b3fac4600f71d208b4ad237e3528e095c0f08cea06b2f6971384f7cc53 Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.539102 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" event={"ID":"f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5","Type":"ContainerStarted","Data":"ec0dcc391fc411f739e3df4f0844b342f4bbb6096a1e1b0eabc8cce7972a1342"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.544947 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" event={"ID":"f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5","Type":"ContainerStarted","Data":"2a2ff3c396f7280e12e1ba3f634f6a02e55e26863774117c421115568df5feab"} Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.546877 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" podUID="f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5" Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.556758 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" podUID="846b5189-20cf-414a-b682-a2bbc6e184cf" Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.576823 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" podStartSLOduration=2.576797441 podStartE2EDuration="2.576797441s" 
podCreationTimestamp="2025-09-29 22:43:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:43:44.552091553 +0000 UTC m=+1028.862380376" watchObservedRunningTime="2025-09-29 22:43:44.576797441 +0000 UTC m=+1028.887086254" Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.588553 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" event={"ID":"851f4d4d-a27d-4fb8-9d26-9ea61e2eb423","Type":"ContainerStarted","Data":"27cd092d5c17c6e9351c0b937453f966518efed2cdb1df3a5138dbc0f2cb0872"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.605366 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" event={"ID":"5d6e0d8b-b5bf-49eb-8619-8f60d4177c32","Type":"ContainerStarted","Data":"bee8bdb12d46319da8d631d1829fe15feff637cf83304eaeba215660f325154e"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.634833 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" event={"ID":"5d6e0d8b-b5bf-49eb-8619-8f60d4177c32","Type":"ContainerStarted","Data":"7275d365c35cf1c4ac0765010e2cdddacda7ae7b65fcd73d697b15f18087fa7b"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.634898 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" event={"ID":"7171ed90-0002-4a34-a417-39a2645e8566","Type":"ContainerStarted","Data":"b6afc28cbe03fe32eb0219515f68cc18b8608bfc78f699d7a2a9c38edd2c230a"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.634919 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" event={"ID":"7171ed90-0002-4a34-a417-39a2645e8566","Type":"ContainerStarted","Data":"230e6fa7fc1fedd191f5b05d1acc19f001c2c6ee1bded71dc4ae2b59c1100de6"} Sep 29 22:43:44 crc kubenswrapper[4922]: I0929 22:43:44.634932 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" event={"ID":"d6777dc8-0849-4744-bc01-7f790064dcfe","Type":"ContainerStarted","Data":"7540834a1247854982babc332447b54ae980a6a73b95b6ebdcc236b6380667fc"} Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.617070 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" podUID="7171ed90-0002-4a34-a417-39a2645e8566" Sep 29 22:43:44 crc kubenswrapper[4922]: E0929 22:43:44.609439 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" podUID="5d6e0d8b-b5bf-49eb-8619-8f60d4177c32" Sep 29 22:43:45 crc kubenswrapper[4922]: I0929 22:43:45.627211 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" event={"ID":"2dc7bdb9-eab6-4497-8888-adadebf30b1a","Type":"ContainerStarted","Data":"cff860b3fac4600f71d208b4ad237e3528e095c0f08cea06b2f6971384f7cc53"} Sep 29 22:43:45 crc kubenswrapper[4922]: E0929 22:43:45.629529 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" podUID="7171ed90-0002-4a34-a417-39a2645e8566" Sep 29 22:43:45 crc kubenswrapper[4922]: E0929 22:43:45.629592 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" podUID="1e3f1a04-5e07-4c81-93dc-beca7a598caf" Sep 29 22:43:45 crc kubenswrapper[4922]: E0929 22:43:45.629829 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:8fdf377daf05e2fa7346505017078fa81981dd945bf635a64c8022633c68118f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" podUID="f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5" Sep 29 22:43:45 crc kubenswrapper[4922]: E0929 22:43:45.629880 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" podUID="846b5189-20cf-414a-b682-a2bbc6e184cf" Sep 29 22:43:45 crc kubenswrapper[4922]: E0929 22:43:45.630150 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" podUID="5d6e0d8b-b5bf-49eb-8619-8f60d4177c32" Sep 29 22:43:45 crc kubenswrapper[4922]: E0929 22:43:45.631050 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" podUID="9eb5c0db-802b-4f80-ac48-9f1e75e3cebb" Sep 29 22:43:45 crc kubenswrapper[4922]: E0929 22:43:45.631284 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:f5f0d2eb534f763cf6578af513add1c21c1659b2cd75214dfddfedb9eebf6397\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" podUID="2fc57cef-6bbd-4925-82a9-0efb9622aa81" Sep 29 
22:43:53 crc kubenswrapper[4922]: I0929 22:43:53.420995 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-5b58fb7c85-tzf2t" Sep 29 22:43:55 crc kubenswrapper[4922]: E0929 22:43:55.486361 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2" Sep 29 22:43:55 crc kubenswrapper[4922]: E0929 22:43:55.487136 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w7rmd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-84f4f7b77b-7b99q_openstack-operators(10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:43:55 crc kubenswrapper[4922]: E0929 22:43:55.960915 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef" Sep 29 22:43:55 crc kubenswrapper[4922]: E0929 22:43:55.961098 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2vcc4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-c7c776c96-czjxj_openstack-operators(87c9a6b1-e1ea-41dc-b77f-67b22bc39517): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:43:56 crc kubenswrapper[4922]: E0929 22:43:56.325849 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c" Sep 29 22:43:56 crc kubenswrapper[4922]: E0929 22:43:56.326086 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m 
DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kmh9r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5d889d78cf-trqhs_openstack-operators(44863fa1-d920-42fc-a5d2-197762fe8c37): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:43:56 crc kubenswrapper[4922]: E0929 22:43:56.718807 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6" Sep 29 22:43:56 crc kubenswrapper[4922]: E0929 22:43:56.719893 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Val
ue:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.
io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/o
penstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content:os-docs-2024.2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-npxjp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6d776955-6txmj_openstack-operators(2dc7bdb9-eab6-4497-8888-adadebf30b1a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.124026 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" podUID="87c9a6b1-e1ea-41dc-b77f-67b22bc39517" Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.189207 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" podUID="2dc7bdb9-eab6-4497-8888-adadebf30b1a" Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.229086 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" podUID="10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89" Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.251323 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" podUID="44863fa1-d920-42fc-a5d2-197762fe8c37" Sep 29 22:43:57 crc 
kubenswrapper[4922]: I0929 22:43:57.724243 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" event={"ID":"d6777dc8-0849-4744-bc01-7f790064dcfe","Type":"ContainerStarted","Data":"ce0d5fa9007c111e9d701cedae452272bc63c6bcf56bdcf5f4f99eea3887d7e0"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.735190 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" event={"ID":"4347835b-b3fa-40b5-b227-43c9da18c8d1","Type":"ContainerStarted","Data":"37c417ebefef41a7b8c01a3f3b2bafb5b6e851af16702e485a6947b963d3a97e"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.740875 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" event={"ID":"b7651f52-4ceb-4d53-b74a-dfb7da473f68","Type":"ContainerStarted","Data":"39aded832578ddcf3f4cfb317281bedb6ea3f255d612bf3d27ceec9cc01f8986"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.745852 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" event={"ID":"851f4d4d-a27d-4fb8-9d26-9ea61e2eb423","Type":"ContainerStarted","Data":"e24b2bdfa21c25e6d8ced80bf0b08633ccb0716ca896cf770987f1187a5c46e8"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.759253 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" event={"ID":"ce8f2c94-c958-4874-a2b8-9b3ee2ca943f","Type":"ContainerStarted","Data":"3504972219f21c261cc532f9742cb7d55f739f6ae2b6501100e82eaa1c6911d1"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.760118 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.761674 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" event={"ID":"a7fd9019-83a0-41a0-8380-fac36130cb3d","Type":"ContainerStarted","Data":"dfcb571362f41505a6475d8845f29a9fec38b220e698b0c05cb3196658dc6ba6"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.761699 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" event={"ID":"a7fd9019-83a0-41a0-8380-fac36130cb3d","Type":"ContainerStarted","Data":"c0a854bd7ef093903fdc882c68c8a5957a2cfd09440f7565078a7aeb0f747718"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.762030 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.763542 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" event={"ID":"2dc7bdb9-eab6-4497-8888-adadebf30b1a","Type":"ContainerStarted","Data":"c6694d62a598aa822d4b9ae79b962bf734c471eb6e836e5106ab6a5e435351e1"} Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.768310 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" podUID="2dc7bdb9-eab6-4497-8888-adadebf30b1a" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.770516 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" event={"ID":"44863fa1-d920-42fc-a5d2-197762fe8c37","Type":"ContainerStarted","Data":"15679a5da8ec01c38370735d0418af95768bd2e9eee142128a3f54b85b8b0070"} Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.771379 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" podUID="44863fa1-d920-42fc-a5d2-197762fe8c37" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.774157 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" event={"ID":"7bb037db-f6bf-4a16-918f-153b149b9ab4","Type":"ContainerStarted","Data":"d4896d27c1b9286121f47bea48fd846753bbbe6d45fbff04472fc89e6676dc07"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.775115 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" event={"ID":"623e3bae-ed71-479d-8ea3-ca0ca035a8a3","Type":"ContainerStarted","Data":"38d2e37d37a834c95404f1e2a694710e7ac6f29b4cc8e26d2395698d73b885a9"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.776009 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" event={"ID":"55d075ba-936a-4e25-ac68-01ae1a6a0a33","Type":"ContainerStarted","Data":"d3649afc0e0d1d26fbe129eced7947a64a7663e1706a085a7e5b73402f577980"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.782133 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" event={"ID":"10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89","Type":"ContainerStarted","Data":"38857df1bfc62184a5962402ab6d0b5ae1f78c3dca551523739254198cf7b1c6"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.783727 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" podStartSLOduration=3.48102117 podStartE2EDuration="16.783717829s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.474129424 +0000 UTC m=+1027.784418237" lastFinishedPulling="2025-09-29 22:43:56.776826083 +0000 UTC m=+1041.087114896" observedRunningTime="2025-09-29 22:43:57.778829776 +0000 UTC m=+1042.089118589" watchObservedRunningTime="2025-09-29 22:43:57.783717829 +0000 UTC m=+1042.094006642" Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.788070 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" podUID="10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.789349 4922 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" event={"ID":"e2481ff3-5842-4351-b0fc-71fecd911258","Type":"ContainerStarted","Data":"fc55d20c6ed2b1ffcf7165ff290895f7a3bf81fdae5e0a2efcafe4e8ea8c19cb"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.790000 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.803954 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" podStartSLOduration=3.531373088 podStartE2EDuration="16.803940509s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.508877158 +0000 UTC m=+1027.819165971" lastFinishedPulling="2025-09-29 22:43:56.781444579 +0000 UTC m=+1041.091733392" observedRunningTime="2025-09-29 22:43:57.801846576 +0000 UTC m=+1042.112135389" watchObservedRunningTime="2025-09-29 22:43:57.803940509 +0000 UTC m=+1042.114229322" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.808195 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" event={"ID":"7b4517a9-f6ca-4209-9c59-a862b207ee30","Type":"ContainerStarted","Data":"a38a069dd8aa3bc3cacecf932020d75b977478ee874366f16828e660fd79810d"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.808232 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" event={"ID":"7b4517a9-f6ca-4209-9c59-a862b207ee30","Type":"ContainerStarted","Data":"5338c818f2188a85c6db4c3e3be97e2b7c48a41679e41c47af97dd2aa6ed1b4b"} Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.808812 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.834576 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" event={"ID":"87c9a6b1-e1ea-41dc-b77f-67b22bc39517","Type":"ContainerStarted","Data":"bc5f0439d34fb6736dea92ba21114b1d7509c66ed705f9103f1630facb705192"} Sep 29 22:43:57 crc kubenswrapper[4922]: E0929 22:43:57.845939 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" podUID="87c9a6b1-e1ea-41dc-b77f-67b22bc39517" Sep 29 22:43:57 crc kubenswrapper[4922]: I0929 22:43:57.920159 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" podStartSLOduration=3.623553578 podStartE2EDuration="16.920141669s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.483989016 +0000 UTC m=+1027.794277829" lastFinishedPulling="2025-09-29 22:43:56.780577107 +0000 UTC m=+1041.090865920" observedRunningTime="2025-09-29 22:43:57.897138429 +0000 UTC m=+1042.207427242" watchObservedRunningTime="2025-09-29 22:43:57.920141669 +0000 UTC m=+1042.230430482" Sep 29 22:43:57 crc 
kubenswrapper[4922]: I0929 22:43:57.945188 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" podStartSLOduration=3.17580553 podStartE2EDuration="16.94517365s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:42.999194144 +0000 UTC m=+1027.309482957" lastFinishedPulling="2025-09-29 22:43:56.768562264 +0000 UTC m=+1041.078851077" observedRunningTime="2025-09-29 22:43:57.941708892 +0000 UTC m=+1042.251997705" watchObservedRunningTime="2025-09-29 22:43:57.94517365 +0000 UTC m=+1042.255462463" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.843018 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" event={"ID":"e2481ff3-5842-4351-b0fc-71fecd911258","Type":"ContainerStarted","Data":"dbfc3470482c6f88d245c7d4356f81bce1a3b8dcdd25e9e941abea86f4527b4b"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.845754 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" event={"ID":"7bb037db-f6bf-4a16-918f-153b149b9ab4","Type":"ContainerStarted","Data":"58f602d9881a81fd4b61b5f6b01c4b5ff351788a78d32e6d4c4ca85021388002"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.845887 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.855631 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" event={"ID":"55d075ba-936a-4e25-ac68-01ae1a6a0a33","Type":"ContainerStarted","Data":"241f5e29d48ba8c10ac73d4cd20fd721639ebc8896ae170d3e57608422496759"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.856011 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.859140 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" event={"ID":"4347835b-b3fa-40b5-b227-43c9da18c8d1","Type":"ContainerStarted","Data":"ffd4438d17f72309207adc1216b912398ee8b7490e3360f78630c81b92ae00a5"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.859615 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.861458 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" event={"ID":"ce8f2c94-c958-4874-a2b8-9b3ee2ca943f","Type":"ContainerStarted","Data":"c4821f3bffab7c892b8249a01d943c7c951f66c5858c955566641da2c65ab626"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.864111 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" event={"ID":"d6777dc8-0849-4744-bc01-7f790064dcfe","Type":"ContainerStarted","Data":"9c3be3bfe62e52e00b52401b5ced2f528344dc819a1c5cf909b7422380873fcf"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.866057 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.872093 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" event={"ID":"b7651f52-4ceb-4d53-b74a-dfb7da473f68","Type":"ContainerStarted","Data":"b17327d5697117f44cbd1959bcd20fe4e04cdf37fafb34efe78cc5792457bd48"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.872252 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.873310 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" podStartSLOduration=4.171406473 podStartE2EDuration="17.87329994s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.077733706 +0000 UTC m=+1027.388022519" lastFinishedPulling="2025-09-29 22:43:56.779627173 +0000 UTC m=+1041.089915986" observedRunningTime="2025-09-29 22:43:58.866279093 +0000 UTC m=+1043.176567906" watchObservedRunningTime="2025-09-29 22:43:58.87329994 +0000 UTC m=+1043.183588753" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.874587 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" event={"ID":"851f4d4d-a27d-4fb8-9d26-9ea61e2eb423","Type":"ContainerStarted","Data":"4d409b5eca1700bc91156e4785c59792acd138e9ac414f55b393021cf14055d9"} Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.875166 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.877341 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" event={"ID":"623e3bae-ed71-479d-8ea3-ca0ca035a8a3","Type":"ContainerStarted","Data":"e107c7511fdfc0fde0b69e6ac49c516d6e14176069906cb0a66f0acd96814350"} Sep 29 22:43:58 crc kubenswrapper[4922]: E0929 22:43:58.879357 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:917e6dcc519277c46e42898bc9f0f066790fa7b9633fcde668cc8a68a547c13c\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" podUID="44863fa1-d920-42fc-a5d2-197762fe8c37" Sep 29 22:43:58 crc kubenswrapper[4922]: E0929 22:43:58.879399 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" podUID="87c9a6b1-e1ea-41dc-b77f-67b22bc39517" Sep 29 22:43:58 crc kubenswrapper[4922]: E0929 22:43:58.879508 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" 
pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" podUID="10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89" Sep 29 22:43:58 crc kubenswrapper[4922]: E0929 22:43:58.880142 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" podUID="2dc7bdb9-eab6-4497-8888-adadebf30b1a" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.885169 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" podStartSLOduration=4.52469112 podStartE2EDuration="17.885146219s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.432643804 +0000 UTC m=+1027.742932617" lastFinishedPulling="2025-09-29 22:43:56.793098903 +0000 UTC m=+1041.103387716" observedRunningTime="2025-09-29 22:43:58.881899317 +0000 UTC m=+1043.192188130" watchObservedRunningTime="2025-09-29 22:43:58.885146219 +0000 UTC m=+1043.195435032" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.903767 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" podStartSLOduration=4.63838617 podStartE2EDuration="17.903740368s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.50851958 +0000 UTC m=+1027.818808393" lastFinishedPulling="2025-09-29 22:43:56.773873778 +0000 UTC m=+1041.084162591" observedRunningTime="2025-09-29 22:43:58.899204573 +0000 UTC m=+1043.209493386" watchObservedRunningTime="2025-09-29 22:43:58.903740368 +0000 UTC m=+1043.214029191" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.913493 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" podStartSLOduration=4.891009293 podStartE2EDuration="17.913477093s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.758939088 +0000 UTC m=+1028.069227901" lastFinishedPulling="2025-09-29 22:43:56.781406888 +0000 UTC m=+1041.091695701" observedRunningTime="2025-09-29 22:43:58.911038232 +0000 UTC m=+1043.221327045" watchObservedRunningTime="2025-09-29 22:43:58.913477093 +0000 UTC m=+1043.223765906" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.915601 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.915658 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.915721 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.916464 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"92b5767336a72e147921a9d2961a6367ee20762375b4581c376088ef25b4feea"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.916538 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://92b5767336a72e147921a9d2961a6367ee20762375b4581c376088ef25b4feea" gracePeriod=600 Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.947193 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" podStartSLOduration=4.67371929 podStartE2EDuration="17.947159272s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.50853616 +0000 UTC m=+1027.818824983" lastFinishedPulling="2025-09-29 22:43:56.781976152 +0000 UTC m=+1041.092264965" observedRunningTime="2025-09-29 22:43:58.928707797 +0000 UTC m=+1043.238996620" watchObservedRunningTime="2025-09-29 22:43:58.947159272 +0000 UTC m=+1043.257448105" Sep 29 22:43:58 crc kubenswrapper[4922]: I0929 22:43:58.965126 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" podStartSLOduration=4.665561553 podStartE2EDuration="17.965095255s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.511611466 +0000 UTC m=+1027.821900279" lastFinishedPulling="2025-09-29 22:43:56.811145118 +0000 UTC m=+1041.121433981" observedRunningTime="2025-09-29 22:43:58.9589509 +0000 UTC m=+1043.269239713" watchObservedRunningTime="2025-09-29 22:43:58.965095255 +0000 UTC m=+1043.275384068" Sep 29 22:43:59 crc kubenswrapper[4922]: I0929 22:43:59.019004 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" podStartSLOduration=3.843259454 podStartE2EDuration="18.018977693s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:42.634583708 +0000 UTC m=+1026.944872521" lastFinishedPulling="2025-09-29 22:43:56.810301937 +0000 UTC m=+1041.120590760" observedRunningTime="2025-09-29 22:43:59.009860323 +0000 UTC m=+1043.320149136" watchObservedRunningTime="2025-09-29 22:43:59.018977693 +0000 UTC m=+1043.329266526" Sep 29 22:43:59 crc kubenswrapper[4922]: I0929 22:43:59.890338 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="92b5767336a72e147921a9d2961a6367ee20762375b4581c376088ef25b4feea" exitCode=0 Sep 29 22:43:59 crc kubenswrapper[4922]: I0929 22:43:59.890434 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"92b5767336a72e147921a9d2961a6367ee20762375b4581c376088ef25b4feea"} Sep 29 22:43:59 crc kubenswrapper[4922]: I0929 22:43:59.890790 4922 scope.go:117] 
"RemoveContainer" containerID="2fff5dec163c43924ec181a6c7d9ee934e027ea79ccf259ff2b5530d85b03707" Sep 29 22:43:59 crc kubenswrapper[4922]: I0929 22:43:59.892863 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" Sep 29 22:44:00 crc kubenswrapper[4922]: I0929 22:44:00.899547 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" event={"ID":"846b5189-20cf-414a-b682-a2bbc6e184cf","Type":"ContainerStarted","Data":"ac774845a40e31f99707e7f5bdf70cbf808aa39d521e730cc561820010c359af"} Sep 29 22:44:00 crc kubenswrapper[4922]: I0929 22:44:00.900013 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" Sep 29 22:44:00 crc kubenswrapper[4922]: I0929 22:44:00.902281 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" event={"ID":"2fc57cef-6bbd-4925-82a9-0efb9622aa81","Type":"ContainerStarted","Data":"47c38ad84c79dae7e231ead5a2f7be3a3f23e0ea2d110eb77ad9f2c5481177d7"} Sep 29 22:44:00 crc kubenswrapper[4922]: I0929 22:44:00.902730 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" Sep 29 22:44:00 crc kubenswrapper[4922]: I0929 22:44:00.905199 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"2744d35a0efae3434bd56ec391b0648d3824ba8565228dfe9d3610ca7ee648f3"} Sep 29 22:44:00 crc kubenswrapper[4922]: I0929 22:44:00.919020 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" podStartSLOduration=3.930685999 podStartE2EDuration="19.918999677s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.793329534 +0000 UTC m=+1028.103618347" lastFinishedPulling="2025-09-29 22:43:59.781643212 +0000 UTC m=+1044.091932025" observedRunningTime="2025-09-29 22:44:00.918052263 +0000 UTC m=+1045.228341066" watchObservedRunningTime="2025-09-29 22:44:00.918999677 +0000 UTC m=+1045.229288490" Sep 29 22:44:00 crc kubenswrapper[4922]: I0929 22:44:00.951737 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" podStartSLOduration=3.680393323 podStartE2EDuration="19.951715432s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.512297783 +0000 UTC m=+1027.822586596" lastFinishedPulling="2025-09-29 22:43:59.783619892 +0000 UTC m=+1044.093908705" observedRunningTime="2025-09-29 22:44:00.949565377 +0000 UTC m=+1045.259854200" watchObservedRunningTime="2025-09-29 22:44:00.951715432 +0000 UTC m=+1045.262004325" Sep 29 22:44:01 crc kubenswrapper[4922]: I0929 22:44:01.913221 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" event={"ID":"7171ed90-0002-4a34-a417-39a2645e8566","Type":"ContainerStarted","Data":"7ea5046b9d41a55dda3e4ae2193c4e4d4ab10203305d140bbd73fbfe1a0944f9"} Sep 29 22:44:01 crc kubenswrapper[4922]: I0929 22:44:01.913851 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" Sep 29 22:44:01 crc kubenswrapper[4922]: I0929 22:44:01.930523 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" podStartSLOduration=2.805069554 podStartE2EDuration="19.930507579s" podCreationTimestamp="2025-09-29 22:43:42 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.789888799 +0000 UTC m=+1028.100177612" lastFinishedPulling="2025-09-29 22:44:00.915326824 +0000 UTC m=+1045.225615637" observedRunningTime="2025-09-29 22:44:01.927855693 +0000 UTC m=+1046.238144506" watchObservedRunningTime="2025-09-29 22:44:01.930507579 +0000 UTC m=+1046.240796392" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.023935 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-dpnvq" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.029632 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-r2mxk" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.053848 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-dwpq7" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.171624 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-cx7lp" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.175069 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-vxc2x" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.290268 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-zhhc4" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.511759 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-mcrvk" Sep 29 22:44:02 crc kubenswrapper[4922]: I0929 22:44:02.530151 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-7mtt9" Sep 29 22:44:05 crc kubenswrapper[4922]: I0929 22:44:05.962284 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" event={"ID":"5d6e0d8b-b5bf-49eb-8619-8f60d4177c32","Type":"ContainerStarted","Data":"6f58ed849045450a7b30a2a6310c5bac81f7065f18d2247ac8a67a54355ea537"} Sep 29 22:44:05 crc kubenswrapper[4922]: I0929 22:44:05.967133 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" Sep 29 22:44:05 crc kubenswrapper[4922]: I0929 22:44:05.975903 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" event={"ID":"1e3f1a04-5e07-4c81-93dc-beca7a598caf","Type":"ContainerStarted","Data":"cf8f5ddd249d59e7fdeda2893c1ebe1d6ae1651f5dfa3b7c19bfb381a62a448c"} Sep 29 22:44:05 crc kubenswrapper[4922]: I0929 22:44:05.983695 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" event={"ID":"f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5","Type":"ContainerStarted","Data":"4d22f7089b39cb0489567135a5e087b7c3e44e17062d061f945e21635829942c"} Sep 29 22:44:05 crc kubenswrapper[4922]: I0929 22:44:05.984162 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" Sep 29 22:44:05 crc kubenswrapper[4922]: I0929 22:44:05.987075 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" event={"ID":"9eb5c0db-802b-4f80-ac48-9f1e75e3cebb","Type":"ContainerStarted","Data":"567c8746dd59b56cfbbeb914b05b7a25c57a4946ff3824d50cb3ae489fe80712"} Sep 29 22:44:06 crc kubenswrapper[4922]: I0929 22:44:06.003134 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" podStartSLOduration=4.052309337 podStartE2EDuration="25.00311343s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.792252017 +0000 UTC m=+1028.102540830" lastFinishedPulling="2025-09-29 22:44:04.74305612 +0000 UTC m=+1049.053344923" observedRunningTime="2025-09-29 22:44:06.000129515 +0000 UTC m=+1050.310418388" watchObservedRunningTime="2025-09-29 22:44:06.00311343 +0000 UTC m=+1050.313402253" Sep 29 22:44:06 crc kubenswrapper[4922]: I0929 22:44:06.031719 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-8blrf" podStartSLOduration=3.018788181 podStartE2EDuration="24.03169032s" podCreationTimestamp="2025-09-29 22:43:42 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.792446542 +0000 UTC m=+1028.102735355" lastFinishedPulling="2025-09-29 22:44:04.805348681 +0000 UTC m=+1049.115637494" observedRunningTime="2025-09-29 22:44:06.021870083 +0000 UTC m=+1050.332158936" watchObservedRunningTime="2025-09-29 22:44:06.03169032 +0000 UTC m=+1050.341979173" Sep 29 22:44:06 crc kubenswrapper[4922]: I0929 22:44:06.051139 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" podStartSLOduration=4.101966217 podStartE2EDuration="25.05110495s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.795360364 +0000 UTC m=+1028.105649177" lastFinishedPulling="2025-09-29 22:44:04.744499097 +0000 UTC m=+1049.054787910" observedRunningTime="2025-09-29 22:44:06.043625901 +0000 UTC m=+1050.353914724" watchObservedRunningTime="2025-09-29 22:44:06.05110495 +0000 UTC m=+1050.361393833" Sep 29 22:44:06 crc kubenswrapper[4922]: I0929 22:44:06.065120 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" podStartSLOduration=3.846162586 podStartE2EDuration="25.065090312s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.522958425 +0000 UTC m=+1027.833247238" lastFinishedPulling="2025-09-29 22:44:04.741886121 +0000 UTC m=+1049.052174964" observedRunningTime="2025-09-29 22:44:06.059989294 +0000 UTC m=+1050.370278157" watchObservedRunningTime="2025-09-29 22:44:06.065090312 +0000 UTC m=+1050.375379175" Sep 29 22:44:10 crc kubenswrapper[4922]: I0929 22:44:10.427186 4922 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Sep 29 22:44:11 crc kubenswrapper[4922]: I0929 22:44:11.719844 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-bfzxs" Sep 29 22:44:11 crc kubenswrapper[4922]: I0929 22:44:11.732600 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-dtws2" Sep 29 22:44:11 crc kubenswrapper[4922]: I0929 22:44:11.826119 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-nvw97" Sep 29 22:44:11 crc kubenswrapper[4922]: I0929 22:44:11.918297 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-tplbn" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.050370 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" event={"ID":"87c9a6b1-e1ea-41dc-b77f-67b22bc39517","Type":"ContainerStarted","Data":"98f0f6b17998a122a7793c07ff94dcf42ec575df5017c1b2a56baabd3bbb3241"} Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.050915 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.052673 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" event={"ID":"44863fa1-d920-42fc-a5d2-197762fe8c37","Type":"ContainerStarted","Data":"71d981ef2090dae435e9f55b8663c004765af6aa986920a7aed9ef85df27477d"} Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.052888 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.069569 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" podStartSLOduration=2.714303039 podStartE2EDuration="31.069552089s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.474275447 +0000 UTC m=+1027.784564260" lastFinishedPulling="2025-09-29 22:44:11.829524497 +0000 UTC m=+1056.139813310" observedRunningTime="2025-09-29 22:44:12.065897497 +0000 UTC m=+1056.376186320" watchObservedRunningTime="2025-09-29 22:44:12.069552089 +0000 UTC m=+1056.379840902" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.085375 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" podStartSLOduration=3.561767964 podStartE2EDuration="31.085359167s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.493604893 +0000 UTC m=+1027.803893706" lastFinishedPulling="2025-09-29 22:44:11.017196086 +0000 UTC m=+1055.327484909" observedRunningTime="2025-09-29 22:44:12.083847559 +0000 UTC m=+1056.394136372" watchObservedRunningTime="2025-09-29 22:44:12.085359167 +0000 UTC m=+1056.395647980" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.372714 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" Sep 29 22:44:12 
crc kubenswrapper[4922]: I0929 22:44:12.375947 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-qxlqv" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.684428 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-wg8cr" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.714794 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-dq4dx" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.757810 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-8kgtj" Sep 29 22:44:12 crc kubenswrapper[4922]: I0929 22:44:12.788321 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-lr8wj" Sep 29 22:44:14 crc kubenswrapper[4922]: I0929 22:44:14.077238 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" event={"ID":"10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89","Type":"ContainerStarted","Data":"856e96938f312796fb4548f7d8a1dfc62ab03b0523d398b6565d07c869eb1498"} Sep 29 22:44:14 crc kubenswrapper[4922]: I0929 22:44:14.078927 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" Sep 29 22:44:14 crc kubenswrapper[4922]: I0929 22:44:14.103613 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" podStartSLOduration=3.586473803 podStartE2EDuration="33.103587392s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:43.467205444 +0000 UTC m=+1027.777494257" lastFinishedPulling="2025-09-29 22:44:12.984319023 +0000 UTC m=+1057.294607846" observedRunningTime="2025-09-29 22:44:14.100057033 +0000 UTC m=+1058.410345896" watchObservedRunningTime="2025-09-29 22:44:14.103587392 +0000 UTC m=+1058.413876245" Sep 29 22:44:15 crc kubenswrapper[4922]: I0929 22:44:15.089386 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" event={"ID":"2dc7bdb9-eab6-4497-8888-adadebf30b1a","Type":"ContainerStarted","Data":"93428b0dea3f6ca10df91dc50c50cac1575d6346460d284f43291329f995df36"} Sep 29 22:44:15 crc kubenswrapper[4922]: I0929 22:44:15.089827 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:44:15 crc kubenswrapper[4922]: I0929 22:44:15.141433 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" podStartSLOduration=4.848635123 podStartE2EDuration="34.141383617s" podCreationTimestamp="2025-09-29 22:43:41 +0000 UTC" firstStartedPulling="2025-09-29 22:43:44.565756359 +0000 UTC m=+1028.876045172" lastFinishedPulling="2025-09-29 22:44:13.858504803 +0000 UTC m=+1058.168793666" observedRunningTime="2025-09-29 22:44:15.141046808 +0000 UTC m=+1059.451335651" watchObservedRunningTime="2025-09-29 22:44:15.141383617 +0000 UTC m=+1059.451672470" Sep 29 22:44:21 crc 
kubenswrapper[4922]: I0929 22:44:21.796807 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-7b99q" Sep 29 22:44:21 crc kubenswrapper[4922]: I0929 22:44:21.810274 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-trqhs" Sep 29 22:44:22 crc kubenswrapper[4922]: I0929 22:44:22.224177 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-czjxj" Sep 29 22:44:23 crc kubenswrapper[4922]: I0929 22:44:23.825528 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-6txmj" Sep 29 22:44:40 crc kubenswrapper[4922]: I0929 22:44:40.939726 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-bwp9j"] Sep 29 22:44:40 crc kubenswrapper[4922]: I0929 22:44:40.941420 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:40 crc kubenswrapper[4922]: I0929 22:44:40.944708 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 29 22:44:40 crc kubenswrapper[4922]: I0929 22:44:40.944767 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-2xnvb" Sep 29 22:44:40 crc kubenswrapper[4922]: I0929 22:44:40.944717 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 29 22:44:40 crc kubenswrapper[4922]: I0929 22:44:40.946037 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 29 22:44:40 crc kubenswrapper[4922]: I0929 22:44:40.957043 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-bwp9j"] Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.009148 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p48qw"] Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.010263 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.012513 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.020962 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p48qw"] Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.097342 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-config\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.097401 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns9g9\" (UniqueName: \"kubernetes.io/projected/b629dfb8-0bd1-4af7-a753-3314f26624c1-kube-api-access-ns9g9\") pod \"dnsmasq-dns-675f4bcbfc-bwp9j\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.097452 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.097471 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46brr\" (UniqueName: \"kubernetes.io/projected/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-kube-api-access-46brr\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.097491 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b629dfb8-0bd1-4af7-a753-3314f26624c1-config\") pod \"dnsmasq-dns-675f4bcbfc-bwp9j\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.198385 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns9g9\" (UniqueName: \"kubernetes.io/projected/b629dfb8-0bd1-4af7-a753-3314f26624c1-kube-api-access-ns9g9\") pod \"dnsmasq-dns-675f4bcbfc-bwp9j\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.198983 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.200314 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 
22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.200495 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46brr\" (UniqueName: \"kubernetes.io/projected/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-kube-api-access-46brr\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.200551 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b629dfb8-0bd1-4af7-a753-3314f26624c1-config\") pod \"dnsmasq-dns-675f4bcbfc-bwp9j\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.200669 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-config\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.201725 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b629dfb8-0bd1-4af7-a753-3314f26624c1-config\") pod \"dnsmasq-dns-675f4bcbfc-bwp9j\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.201785 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-config\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.221475 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns9g9\" (UniqueName: \"kubernetes.io/projected/b629dfb8-0bd1-4af7-a753-3314f26624c1-kube-api-access-ns9g9\") pod \"dnsmasq-dns-675f4bcbfc-bwp9j\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.229003 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46brr\" (UniqueName: \"kubernetes.io/projected/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-kube-api-access-46brr\") pod \"dnsmasq-dns-78dd6ddcc-p48qw\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.262342 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.323019 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.750502 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-bwp9j"] Sep 29 22:44:41 crc kubenswrapper[4922]: W0929 22:44:41.761123 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb629dfb8_0bd1_4af7_a753_3314f26624c1.slice/crio-746849a206691a8804069e5340ba4b2d56bec1a4bda347b4475c74b5311860da WatchSource:0}: Error finding container 746849a206691a8804069e5340ba4b2d56bec1a4bda347b4475c74b5311860da: Status 404 returned error can't find the container with id 746849a206691a8804069e5340ba4b2d56bec1a4bda347b4475c74b5311860da Sep 29 22:44:41 crc kubenswrapper[4922]: I0929 22:44:41.810392 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p48qw"] Sep 29 22:44:41 crc kubenswrapper[4922]: W0929 22:44:41.819095 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefc7fea0_1d2e_4ee0_868b_adee7cdb3eea.slice/crio-cdd2def1568f0fdf82ee761c6eb6b918b991c10f655d42118f4968e57ff4f26a WatchSource:0}: Error finding container cdd2def1568f0fdf82ee761c6eb6b918b991c10f655d42118f4968e57ff4f26a: Status 404 returned error can't find the container with id cdd2def1568f0fdf82ee761c6eb6b918b991c10f655d42118f4968e57ff4f26a Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.003884 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-bwp9j"] Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.026297 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rntq2"] Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.027709 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.042315 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rntq2"] Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.113395 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kbgg\" (UniqueName: \"kubernetes.io/projected/8fc7f380-d4f6-4713-953e-d97162e9e4e6-kube-api-access-5kbgg\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.113496 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-config\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.113547 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.214662 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-config\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.214765 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.214822 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kbgg\" (UniqueName: \"kubernetes.io/projected/8fc7f380-d4f6-4713-953e-d97162e9e4e6-kube-api-access-5kbgg\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.215820 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-dns-svc\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.215912 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-config\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.233262 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kbgg\" (UniqueName: 
\"kubernetes.io/projected/8fc7f380-d4f6-4713-953e-d97162e9e4e6-kube-api-access-5kbgg\") pod \"dnsmasq-dns-666b6646f7-rntq2\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.330224 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" event={"ID":"b629dfb8-0bd1-4af7-a753-3314f26624c1","Type":"ContainerStarted","Data":"746849a206691a8804069e5340ba4b2d56bec1a4bda347b4475c74b5311860da"} Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.334033 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" event={"ID":"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea","Type":"ContainerStarted","Data":"cdd2def1568f0fdf82ee761c6eb6b918b991c10f655d42118f4968e57ff4f26a"} Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.346253 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.714132 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p48qw"] Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.741780 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bcx8x"] Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.743012 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.748925 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bcx8x"] Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.824580 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.824671 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-config\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.824763 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jfb7\" (UniqueName: \"kubernetes.io/projected/086d2221-5075-45af-8bfb-c7b1908f410d-kube-api-access-5jfb7\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.843000 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rntq2"] Sep 29 22:44:42 crc kubenswrapper[4922]: W0929 22:44:42.853415 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fc7f380_d4f6_4713_953e_d97162e9e4e6.slice/crio-be66a688666ffff56cd5d116cc7b8aafcf13bf54dfecfb6d7f5bdf2b139f4797 WatchSource:0}: Error finding container be66a688666ffff56cd5d116cc7b8aafcf13bf54dfecfb6d7f5bdf2b139f4797: Status 404 returned error can't find the container with id 
be66a688666ffff56cd5d116cc7b8aafcf13bf54dfecfb6d7f5bdf2b139f4797 Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.925824 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-config\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.925866 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jfb7\" (UniqueName: \"kubernetes.io/projected/086d2221-5075-45af-8bfb-c7b1908f410d-kube-api-access-5jfb7\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.925933 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.926852 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.928759 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-config\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:42 crc kubenswrapper[4922]: I0929 22:44:42.946989 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jfb7\" (UniqueName: \"kubernetes.io/projected/086d2221-5075-45af-8bfb-c7b1908f410d-kube-api-access-5jfb7\") pod \"dnsmasq-dns-57d769cc4f-bcx8x\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.063742 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.161851 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.163018 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.166187 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.166583 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.167085 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.167224 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.167453 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-5947v" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.168392 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.170731 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.175740 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341050 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341318 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341344 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e56d31de-64f5-42a7-8243-7ac6d992a03d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341367 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341403 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341431 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/e56d31de-64f5-42a7-8243-7ac6d992a03d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341448 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341475 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341509 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341530 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.341552 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkcsp\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-kube-api-access-pkcsp\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.371064 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" event={"ID":"8fc7f380-d4f6-4713-953e-d97162e9e4e6","Type":"ContainerStarted","Data":"be66a688666ffff56cd5d116cc7b8aafcf13bf54dfecfb6d7f5bdf2b139f4797"} Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.422453 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bcx8x"] Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445124 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445200 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445228 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445243 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e56d31de-64f5-42a7-8243-7ac6d992a03d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445273 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445310 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445332 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445353 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkcsp\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-kube-api-access-pkcsp\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445377 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445399 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.445440 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e56d31de-64f5-42a7-8243-7ac6d992a03d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.446484 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.447222 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.449813 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.450267 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.450535 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: W0929 22:44:43.453246 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod086d2221_5075_45af_8bfb_c7b1908f410d.slice/crio-2956e1698ce5edcc96a63c3f8ea85345dfe433f033aa15f4a3a1913c527e3859 WatchSource:0}: Error finding container 2956e1698ce5edcc96a63c3f8ea85345dfe433f033aa15f4a3a1913c527e3859: Status 404 returned error can't find the container with id 2956e1698ce5edcc96a63c3f8ea85345dfe433f033aa15f4a3a1913c527e3859 Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.456740 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e56d31de-64f5-42a7-8243-7ac6d992a03d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.459026 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.460239 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e56d31de-64f5-42a7-8243-7ac6d992a03d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.460594 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.463533 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.465921 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkcsp\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-kube-api-access-pkcsp\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.474653 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.495004 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.882563 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.883973 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.889201 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.889440 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-s4mpv" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.889614 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.889771 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.890381 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.891623 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.892783 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 22:44:43 crc kubenswrapper[4922]: I0929 22:44:43.898157 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051505 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051548 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cab5f5be-6bdd-481b-a07b-08491f6f2be5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051588 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cab5f5be-6bdd-481b-a07b-08491f6f2be5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051613 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051671 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051694 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051722 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051789 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051827 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-227pz\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-kube-api-access-227pz\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.051850 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.069264 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 22:44:44 crc kubenswrapper[4922]: W0929 22:44:44.116656 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode56d31de_64f5_42a7_8243_7ac6d992a03d.slice/crio-77c1cf89ced628c70e19a175a9798b05aadd885cda871ffdb601dbf1fcaff309 WatchSource:0}: Error finding container 77c1cf89ced628c70e19a175a9798b05aadd885cda871ffdb601dbf1fcaff309: Status 404 returned error can't find the container with id 77c1cf89ced628c70e19a175a9798b05aadd885cda871ffdb601dbf1fcaff309 Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153519 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153580 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153607 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153633 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153662 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153688 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153709 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-227pz\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-kube-api-access-227pz\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153735 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153757 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153777 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cab5f5be-6bdd-481b-a07b-08491f6f2be5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.153794 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cab5f5be-6bdd-481b-a07b-08491f6f2be5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.154871 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.154979 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.155141 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.155697 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.156196 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.157803 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-server-conf\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.159952 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.160476 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cab5f5be-6bdd-481b-a07b-08491f6f2be5-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.163245 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cab5f5be-6bdd-481b-a07b-08491f6f2be5-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.168289 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.173545 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-227pz\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-kube-api-access-227pz\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.179898 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.209787 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.446746 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" event={"ID":"086d2221-5075-45af-8bfb-c7b1908f410d","Type":"ContainerStarted","Data":"2956e1698ce5edcc96a63c3f8ea85345dfe433f033aa15f4a3a1913c527e3859"} Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.447021 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e56d31de-64f5-42a7-8243-7ac6d992a03d","Type":"ContainerStarted","Data":"77c1cf89ced628c70e19a175a9798b05aadd885cda871ffdb601dbf1fcaff309"} Sep 29 22:44:44 crc kubenswrapper[4922]: I0929 22:44:44.742101 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 22:44:44 crc kubenswrapper[4922]: W0929 22:44:44.756941 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcab5f5be_6bdd_481b_a07b_08491f6f2be5.slice/crio-b1d678a6d50424266b45f4e897abe44d2265e6bab757ecfa67241b7bd7b65447 WatchSource:0}: Error finding container b1d678a6d50424266b45f4e897abe44d2265e6bab757ecfa67241b7bd7b65447: Status 404 returned error can't find the container with id b1d678a6d50424266b45f4e897abe44d2265e6bab757ecfa67241b7bd7b65447 Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.321419 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.322880 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.326133 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.326991 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.327232 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.340445 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-kt9xw" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.340940 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.361981 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.362027 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.458638 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cab5f5be-6bdd-481b-a07b-08491f6f2be5","Type":"ContainerStarted","Data":"b1d678a6d50424266b45f4e897abe44d2265e6bab757ecfa67241b7bd7b65447"} Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491729 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5ghp\" (UniqueName: \"kubernetes.io/projected/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kube-api-access-r5ghp\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " 
pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491769 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-secrets\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491805 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-default\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491826 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491842 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491864 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491893 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491907 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.491923 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kolla-config\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602026 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5ghp\" (UniqueName: \"kubernetes.io/projected/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kube-api-access-r5ghp\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: 
I0929 22:44:45.602086 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-secrets\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602159 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-default\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602187 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602202 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602600 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602722 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602740 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.602761 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kolla-config\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.603906 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.604418 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-default\") pod 
\"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.605362 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.616607 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.619341 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.619383 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.619972 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-secrets\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.619983 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kolla-config\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.625876 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5ghp\" (UniqueName: \"kubernetes.io/projected/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kube-api-access-r5ghp\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.633602 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " pod="openstack/openstack-galera-0" Sep 29 22:44:45 crc kubenswrapper[4922]: I0929 22:44:45.664484 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.351765 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 22:44:46 crc kubenswrapper[4922]: W0929 22:44:46.370199 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod866ac5e5_219a_4afa_b6b3_0ca293c81f1d.slice/crio-80221817c0247a5eb947fcfbca2c7a261517d2e63942374268eab7554e0722e4 WatchSource:0}: Error finding container 80221817c0247a5eb947fcfbca2c7a261517d2e63942374268eab7554e0722e4: Status 404 returned error can't find the container with id 80221817c0247a5eb947fcfbca2c7a261517d2e63942374268eab7554e0722e4 Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.474127 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"866ac5e5-219a-4afa-b6b3-0ca293c81f1d","Type":"ContainerStarted","Data":"80221817c0247a5eb947fcfbca2c7a261517d2e63942374268eab7554e0722e4"} Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.687355 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.689023 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.693271 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.694224 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.694616 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.696215 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-bhjqg" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.714933 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834172 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834229 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834367 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834429 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834529 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834553 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834610 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834675 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.834700 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfqvh\" (UniqueName: \"kubernetes.io/projected/aa85a019-83a6-4b71-abdb-7144be0105ae-kube-api-access-nfqvh\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935570 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935610 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfqvh\" (UniqueName: \"kubernetes.io/projected/aa85a019-83a6-4b71-abdb-7144be0105ae-kube-api-access-nfqvh\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935648 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935673 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935704 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935722 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935754 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935769 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.935788 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.936660 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.939355 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.940022 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.940322 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.940846 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.946637 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.946763 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.950877 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfqvh\" (UniqueName: \"kubernetes.io/projected/aa85a019-83a6-4b71-abdb-7144be0105ae-kube-api-access-nfqvh\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.961105 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:46 crc kubenswrapper[4922]: I0929 22:44:46.962639 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.014784 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.074155 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.076176 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.079078 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-rsd9j" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.079259 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.079380 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.086308 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.157414 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64df5\" (UniqueName: \"kubernetes.io/projected/1654e799-40ef-413a-8324-bb5b4f7a8f17-kube-api-access-64df5\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.157464 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-combined-ca-bundle\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.157529 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-config-data\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.157551 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-kolla-config\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.157572 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-memcached-tls-certs\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.257975 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64df5\" (UniqueName: \"kubernetes.io/projected/1654e799-40ef-413a-8324-bb5b4f7a8f17-kube-api-access-64df5\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.258339 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-combined-ca-bundle\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.258405 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-config-data\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.258430 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-kolla-config\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.258446 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-memcached-tls-certs\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.259180 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-config-data\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.259285 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-kolla-config\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.266912 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-memcached-tls-certs\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.287117 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64df5\" (UniqueName: \"kubernetes.io/projected/1654e799-40ef-413a-8324-bb5b4f7a8f17-kube-api-access-64df5\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.287294 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-combined-ca-bundle\") pod \"memcached-0\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.491374 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 29 22:44:47 crc kubenswrapper[4922]: I0929 22:44:47.565284 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 22:44:47 crc kubenswrapper[4922]: W0929 22:44:47.628344 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa85a019_83a6_4b71_abdb_7144be0105ae.slice/crio-9016f9e19bef00c2a06f46e5f6cb599045903a16c5e1959454384154153a7943 WatchSource:0}: Error finding container 9016f9e19bef00c2a06f46e5f6cb599045903a16c5e1959454384154153a7943: Status 404 returned error can't find the container with id 9016f9e19bef00c2a06f46e5f6cb599045903a16c5e1959454384154153a7943 Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.072484 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.504040 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1654e799-40ef-413a-8324-bb5b4f7a8f17","Type":"ContainerStarted","Data":"5a36860f0e2452601f8f54e267156ad50e136c84c32898f42e31fa0eecde70e4"} Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.505318 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"aa85a019-83a6-4b71-abdb-7144be0105ae","Type":"ContainerStarted","Data":"9016f9e19bef00c2a06f46e5f6cb599045903a16c5e1959454384154153a7943"} Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.732282 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.734126 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.736786 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.767128 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2dlvl" Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.887175 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhhp8\" (UniqueName: \"kubernetes.io/projected/bd0ad759-f12c-454b-b9e3-c2a58ccf74e3-kube-api-access-mhhp8\") pod \"kube-state-metrics-0\" (UID: \"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3\") " pod="openstack/kube-state-metrics-0" Sep 29 22:44:48 crc kubenswrapper[4922]: I0929 22:44:48.988251 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhhp8\" (UniqueName: \"kubernetes.io/projected/bd0ad759-f12c-454b-b9e3-c2a58ccf74e3-kube-api-access-mhhp8\") pod \"kube-state-metrics-0\" (UID: \"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3\") " pod="openstack/kube-state-metrics-0" Sep 29 22:44:49 crc kubenswrapper[4922]: I0929 22:44:49.029493 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhhp8\" (UniqueName: \"kubernetes.io/projected/bd0ad759-f12c-454b-b9e3-c2a58ccf74e3-kube-api-access-mhhp8\") pod \"kube-state-metrics-0\" (UID: \"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3\") " pod="openstack/kube-state-metrics-0" Sep 29 22:44:49 crc kubenswrapper[4922]: I0929 22:44:49.108056 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.289515 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-nrr6k"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.290828 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.293890 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.294160 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.294208 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-8gtgg" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.306255 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nrr6k"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.328072 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-4jkkx"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.330274 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.348497 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-4jkkx"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486283 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486337 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-log\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486362 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-run\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486428 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-log-ovn\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486452 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4594a140-3321-4a34-ab35-65ad3560b085-scripts\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 
22:44:54.486467 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czb6m\" (UniqueName: \"kubernetes.io/projected/217b822b-44c6-465e-982a-23fa07d94b58-kube-api-access-czb6m\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486488 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-ovn-controller-tls-certs\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486517 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-etc-ovs\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486535 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmghh\" (UniqueName: \"kubernetes.io/projected/4594a140-3321-4a34-ab35-65ad3560b085-kube-api-access-zmghh\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486551 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-lib\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486570 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/217b822b-44c6-465e-982a-23fa07d94b58-scripts\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486584 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-combined-ca-bundle\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.486604 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run-ovn\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.587978 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-log-ovn\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588034 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4594a140-3321-4a34-ab35-65ad3560b085-scripts\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588051 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czb6m\" (UniqueName: \"kubernetes.io/projected/217b822b-44c6-465e-982a-23fa07d94b58-kube-api-access-czb6m\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588096 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-ovn-controller-tls-certs\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588135 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-etc-ovs\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmghh\" (UniqueName: \"kubernetes.io/projected/4594a140-3321-4a34-ab35-65ad3560b085-kube-api-access-zmghh\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588167 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-lib\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588191 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/217b822b-44c6-465e-982a-23fa07d94b58-scripts\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588224 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-combined-ca-bundle\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588256 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run-ovn\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588279 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run\") 
pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588304 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-log\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588325 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-run\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588631 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-run\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588701 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-log-ovn\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588904 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run-ovn\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.588944 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.589016 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-log\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.589909 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-lib\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.590052 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-etc-ovs\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.590550 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/4594a140-3321-4a34-ab35-65ad3560b085-scripts\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.590966 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/217b822b-44c6-465e-982a-23fa07d94b58-scripts\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.595176 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-ovn-controller-tls-certs\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.598082 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-combined-ca-bundle\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.613253 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmghh\" (UniqueName: \"kubernetes.io/projected/4594a140-3321-4a34-ab35-65ad3560b085-kube-api-access-zmghh\") pod \"ovn-controller-ovs-4jkkx\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.626378 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czb6m\" (UniqueName: \"kubernetes.io/projected/217b822b-44c6-465e-982a-23fa07d94b58-kube-api-access-czb6m\") pod \"ovn-controller-nrr6k\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.630332 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.631994 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.636574 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.636731 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.636758 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.637264 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-w77bf" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.637326 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.639240 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.647900 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790463 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790504 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790526 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-config\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790551 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790575 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce38540-6796-48b5-82e7-aad30cf98841-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790605 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790620 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.790671 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-946jd\" (UniqueName: \"kubernetes.io/projected/bce38540-6796-48b5-82e7-aad30cf98841-kube-api-access-946jd\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.820063 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.822233 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.825305 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.825452 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.825633 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-klpj4" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.825710 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.827074 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.891859 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-946jd\" (UniqueName: \"kubernetes.io/projected/bce38540-6796-48b5-82e7-aad30cf98841-kube-api-access-946jd\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.891962 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.892034 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.892055 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-config\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.892191 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.892217 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce38540-6796-48b5-82e7-aad30cf98841-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.892250 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.892721 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.892924 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce38540-6796-48b5-82e7-aad30cf98841-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.893020 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-config\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.893076 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.893616 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.897585 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.898113 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.898712 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.907800 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-946jd\" (UniqueName: \"kubernetes.io/projected/bce38540-6796-48b5-82e7-aad30cf98841-kube-api-access-946jd\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.918253 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 
22:44:54.920753 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nrr6k" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.972571 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994360 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994462 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-config\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994511 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994542 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994595 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgxfx\" (UniqueName: \"kubernetes.io/projected/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-kube-api-access-qgxfx\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994624 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994681 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:54 crc kubenswrapper[4922]: I0929 22:44:54.994706 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096068 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096109 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096147 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096187 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-config\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096227 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096251 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096288 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgxfx\" (UniqueName: \"kubernetes.io/projected/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-kube-api-access-qgxfx\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096308 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096506 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.096772 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.097336 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.097911 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-config\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.099689 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.100653 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.101541 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.120079 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgxfx\" (UniqueName: \"kubernetes.io/projected/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-kube-api-access-qgxfx\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.128792 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " pod="openstack/ovsdbserver-sb-0" Sep 29 22:44:55 crc kubenswrapper[4922]: I0929 22:44:55.140823 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.146822 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4"] Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.149048 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.151713 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.152427 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.156672 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4"] Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.288369 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b5656b9-b6f8-4707-b988-b3bbc24986b2-config-volume\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.288660 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66vq7\" (UniqueName: \"kubernetes.io/projected/4b5656b9-b6f8-4707-b988-b3bbc24986b2-kube-api-access-66vq7\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.288708 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b5656b9-b6f8-4707-b988-b3bbc24986b2-secret-volume\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.390740 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66vq7\" (UniqueName: \"kubernetes.io/projected/4b5656b9-b6f8-4707-b988-b3bbc24986b2-kube-api-access-66vq7\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.390896 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b5656b9-b6f8-4707-b988-b3bbc24986b2-secret-volume\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.391045 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b5656b9-b6f8-4707-b988-b3bbc24986b2-config-volume\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.392769 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b5656b9-b6f8-4707-b988-b3bbc24986b2-config-volume\") pod 
\"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.410207 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b5656b9-b6f8-4707-b988-b3bbc24986b2-secret-volume\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.419663 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66vq7\" (UniqueName: \"kubernetes.io/projected/4b5656b9-b6f8-4707-b988-b3bbc24986b2-kube-api-access-66vq7\") pod \"collect-profiles-29319765-fz9h4\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:00 crc kubenswrapper[4922]: I0929 22:45:00.484362 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:08 crc kubenswrapper[4922]: E0929 22:45:08.217860 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Sep 29 22:45:08 crc kubenswrapper[4922]: E0929 22:45:08.218513 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r5ghp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&Security
Context{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(866ac5e5-219a-4afa-b6b3-0ca293c81f1d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:08 crc kubenswrapper[4922]: E0929 22:45:08.219738 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" Sep 29 22:45:08 crc kubenswrapper[4922]: E0929 22:45:08.236383 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Sep 29 22:45:08 crc kubenswrapper[4922]: E0929 22:45:08.236558 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pkcsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(e56d31de-64f5-42a7-8243-7ac6d992a03d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:08 crc kubenswrapper[4922]: E0929 22:45:08.237764 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" Sep 29 22:45:08 crc kubenswrapper[4922]: E0929 22:45:08.693636 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.072627 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.072833 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nfqvh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(aa85a019-83a6-4b71-abdb-7144be0105ae): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.073711 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.073867 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5jfb7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-bcx8x_openstack(086d2221-5075-45af-8bfb-c7b1908f410d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.074094 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.075494 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" podUID="086d2221-5075-45af-8bfb-c7b1908f410d" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.119509 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.119732 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-46brr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-p48qw_openstack(efc7fea0-1d2e-4ee0-868b-adee7cdb3eea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.121400 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" podUID="efc7fea0-1d2e-4ee0-868b-adee7cdb3eea" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.125829 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.126042 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ns9g9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-bwp9j_openstack(b629dfb8-0bd1-4af7-a753-3314f26624c1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.127365 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" podUID="b629dfb8-0bd1-4af7-a753-3314f26624c1" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.138033 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.138196 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5kbgg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-rntq2_openstack(8fc7f380-d4f6-4713-953e-d97162e9e4e6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.139407 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" podUID="8fc7f380-d4f6-4713-953e-d97162e9e4e6" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.700650 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" podUID="086d2221-5075-45af-8bfb-c7b1908f410d" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.701929 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.702039 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" podUID="8fc7f380-d4f6-4713-953e-d97162e9e4e6" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.798569 4922 log.go:32] "PullImage from image service failed" err="rpc error: 
code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Sep 29 22:45:09 crc kubenswrapper[4922]: E0929 22:45:09.798709 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n648h8bh548h89h5cdh5dbh65dh5b6h74h85h569h94h579h68ch55ch67h558h57chd4h68bh5f8h566h648h67bh5d6hb9h56chbdhbch75hfdhd7q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-64df5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(1654e799-40ef-413a-8324-bb5b4f7a8f17): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:45:09 crc 
kubenswrapper[4922]: E0929 22:45:09.802106 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="1654e799-40ef-413a-8324-bb5b4f7a8f17" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.096250 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.148587 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.177489 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-dns-svc\") pod \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.177842 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46brr\" (UniqueName: \"kubernetes.io/projected/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-kube-api-access-46brr\") pod \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.177885 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-config\") pod \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\" (UID: \"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea\") " Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.178087 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "efc7fea0-1d2e-4ee0-868b-adee7cdb3eea" (UID: "efc7fea0-1d2e-4ee0-868b-adee7cdb3eea"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.178317 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.178441 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-config" (OuterVolumeSpecName: "config") pod "efc7fea0-1d2e-4ee0-868b-adee7cdb3eea" (UID: "efc7fea0-1d2e-4ee0-868b-adee7cdb3eea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.183161 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-kube-api-access-46brr" (OuterVolumeSpecName: "kube-api-access-46brr") pod "efc7fea0-1d2e-4ee0-868b-adee7cdb3eea" (UID: "efc7fea0-1d2e-4ee0-868b-adee7cdb3eea"). InnerVolumeSpecName "kube-api-access-46brr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.284499 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b629dfb8-0bd1-4af7-a753-3314f26624c1-config\") pod \"b629dfb8-0bd1-4af7-a753-3314f26624c1\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.284578 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ns9g9\" (UniqueName: \"kubernetes.io/projected/b629dfb8-0bd1-4af7-a753-3314f26624c1-kube-api-access-ns9g9\") pod \"b629dfb8-0bd1-4af7-a753-3314f26624c1\" (UID: \"b629dfb8-0bd1-4af7-a753-3314f26624c1\") " Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.284911 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.284922 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46brr\" (UniqueName: \"kubernetes.io/projected/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea-kube-api-access-46brr\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.285038 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b629dfb8-0bd1-4af7-a753-3314f26624c1-config" (OuterVolumeSpecName: "config") pod "b629dfb8-0bd1-4af7-a753-3314f26624c1" (UID: "b629dfb8-0bd1-4af7-a753-3314f26624c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.291615 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b629dfb8-0bd1-4af7-a753-3314f26624c1-kube-api-access-ns9g9" (OuterVolumeSpecName: "kube-api-access-ns9g9") pod "b629dfb8-0bd1-4af7-a753-3314f26624c1" (UID: "b629dfb8-0bd1-4af7-a753-3314f26624c1"). InnerVolumeSpecName "kube-api-access-ns9g9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:10 crc kubenswrapper[4922]: W0929 22:45:10.300301 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod217b822b_44c6_465e_982a_23fa07d94b58.slice/crio-97c187f67fcfefa943604ef4a265510e6fb2b145e038d78a1cabd98d9773e1de WatchSource:0}: Error finding container 97c187f67fcfefa943604ef4a265510e6fb2b145e038d78a1cabd98d9773e1de: Status 404 returned error can't find the container with id 97c187f67fcfefa943604ef4a265510e6fb2b145e038d78a1cabd98d9773e1de Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.302437 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nrr6k"] Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.348972 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:45:10 crc kubenswrapper[4922]: W0929 22:45:10.351548 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd0ad759_f12c_454b_b9e3_c2a58ccf74e3.slice/crio-2edeecde746156b021aab777be394caf756d544c4d34cc0f3b8c5d2727961c89 WatchSource:0}: Error finding container 2edeecde746156b021aab777be394caf756d544c4d34cc0f3b8c5d2727961c89: Status 404 returned error can't find the container with id 2edeecde746156b021aab777be394caf756d544c4d34cc0f3b8c5d2727961c89 Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.386094 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b629dfb8-0bd1-4af7-a753-3314f26624c1-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.386130 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ns9g9\" (UniqueName: \"kubernetes.io/projected/b629dfb8-0bd1-4af7-a753-3314f26624c1-kube-api-access-ns9g9\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.447576 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-4jkkx"] Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.515325 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4"] Sep 29 22:45:10 crc kubenswrapper[4922]: W0929 22:45:10.520321 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b5656b9_b6f8_4707_b988_b3bbc24986b2.slice/crio-42f39d3e7da28a7f258da3a903f828c5db1020f4ccf57dfcc07d35ed893b95d5 WatchSource:0}: Error finding container 42f39d3e7da28a7f258da3a903f828c5db1020f4ccf57dfcc07d35ed893b95d5: Status 404 returned error can't find the container with id 42f39d3e7da28a7f258da3a903f828c5db1020f4ccf57dfcc07d35ed893b95d5 Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.547961 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.645897 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 22:45:10 crc kubenswrapper[4922]: W0929 22:45:10.648619 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbce38540_6796_48b5_82e7_aad30cf98841.slice/crio-af4d8dd2788db06ecbf18e8b922a9ac7b1fed4d998dfaa887d1dff24df9810d9 WatchSource:0}: Error finding container 
af4d8dd2788db06ecbf18e8b922a9ac7b1fed4d998dfaa887d1dff24df9810d9: Status 404 returned error can't find the container with id af4d8dd2788db06ecbf18e8b922a9ac7b1fed4d998dfaa887d1dff24df9810d9 Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.707928 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3","Type":"ContainerStarted","Data":"2edeecde746156b021aab777be394caf756d544c4d34cc0f3b8c5d2727961c89"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.709387 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" event={"ID":"efc7fea0-1d2e-4ee0-868b-adee7cdb3eea","Type":"ContainerDied","Data":"cdd2def1568f0fdf82ee761c6eb6b918b991c10f655d42118f4968e57ff4f26a"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.709002 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-p48qw" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.710887 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e56d31de-64f5-42a7-8243-7ac6d992a03d","Type":"ContainerStarted","Data":"34658a45d429ee4156c92b9c0c2d869fe7dd616fe8bb3f832f80da1bc9e277b3"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.712732 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.712731 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-bwp9j" event={"ID":"b629dfb8-0bd1-4af7-a753-3314f26624c1","Type":"ContainerDied","Data":"746849a206691a8804069e5340ba4b2d56bec1a4bda347b4475c74b5311860da"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.713978 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce38540-6796-48b5-82e7-aad30cf98841","Type":"ContainerStarted","Data":"af4d8dd2788db06ecbf18e8b922a9ac7b1fed4d998dfaa887d1dff24df9810d9"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.721916 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cab5f5be-6bdd-481b-a07b-08491f6f2be5","Type":"ContainerStarted","Data":"7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.724922 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" event={"ID":"4b5656b9-b6f8-4707-b988-b3bbc24986b2","Type":"ContainerStarted","Data":"da8ea04b3bd8c2168e8dfc1a97d02fc2c7375bcacfba3760942e140bef2d41dc"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.724957 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" event={"ID":"4b5656b9-b6f8-4707-b988-b3bbc24986b2","Type":"ContainerStarted","Data":"42f39d3e7da28a7f258da3a903f828c5db1020f4ccf57dfcc07d35ed893b95d5"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.725714 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"4a7323e3-8b0e-4f74-b0f4-73c5874fe361","Type":"ContainerStarted","Data":"92ca24d9521be6c461634b27176ea969e7a6aa3578044b9212d071a9ed1689ce"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.726904 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-nrr6k" event={"ID":"217b822b-44c6-465e-982a-23fa07d94b58","Type":"ContainerStarted","Data":"97c187f67fcfefa943604ef4a265510e6fb2b145e038d78a1cabd98d9773e1de"} Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.728179 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerStarted","Data":"591d8a3bc54f16d3df0a65cdc361508792038fcbd0b26e36f247cab59f266e9b"} Sep 29 22:45:10 crc kubenswrapper[4922]: E0929 22:45:10.730422 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="1654e799-40ef-413a-8324-bb5b4f7a8f17" Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.808412 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-bwp9j"] Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.815364 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-bwp9j"] Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.825460 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p48qw"] Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.844663 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-p48qw"] Sep 29 22:45:10 crc kubenswrapper[4922]: I0929 22:45:10.886001 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" podStartSLOduration=10.885985751 podStartE2EDuration="10.885985751s" podCreationTimestamp="2025-09-29 22:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:45:10.88077847 +0000 UTC m=+1115.191067283" watchObservedRunningTime="2025-09-29 22:45:10.885985751 +0000 UTC m=+1115.196274564" Sep 29 22:45:11 crc kubenswrapper[4922]: I0929 22:45:11.737968 4922 generic.go:334] "Generic (PLEG): container finished" podID="4b5656b9-b6f8-4707-b988-b3bbc24986b2" containerID="da8ea04b3bd8c2168e8dfc1a97d02fc2c7375bcacfba3760942e140bef2d41dc" exitCode=0 Sep 29 22:45:11 crc kubenswrapper[4922]: I0929 22:45:11.738069 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" event={"ID":"4b5656b9-b6f8-4707-b988-b3bbc24986b2","Type":"ContainerDied","Data":"da8ea04b3bd8c2168e8dfc1a97d02fc2c7375bcacfba3760942e140bef2d41dc"} Sep 29 22:45:12 crc kubenswrapper[4922]: I0929 22:45:12.431831 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b629dfb8-0bd1-4af7-a753-3314f26624c1" path="/var/lib/kubelet/pods/b629dfb8-0bd1-4af7-a753-3314f26624c1/volumes" Sep 29 22:45:12 crc kubenswrapper[4922]: I0929 22:45:12.432520 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efc7fea0-1d2e-4ee0-868b-adee7cdb3eea" path="/var/lib/kubelet/pods/efc7fea0-1d2e-4ee0-868b-adee7cdb3eea/volumes" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.398433 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.545126 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66vq7\" (UniqueName: \"kubernetes.io/projected/4b5656b9-b6f8-4707-b988-b3bbc24986b2-kube-api-access-66vq7\") pod \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.545359 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b5656b9-b6f8-4707-b988-b3bbc24986b2-secret-volume\") pod \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.545448 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b5656b9-b6f8-4707-b988-b3bbc24986b2-config-volume\") pod \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\" (UID: \"4b5656b9-b6f8-4707-b988-b3bbc24986b2\") " Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.548298 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b5656b9-b6f8-4707-b988-b3bbc24986b2-config-volume" (OuterVolumeSpecName: "config-volume") pod "4b5656b9-b6f8-4707-b988-b3bbc24986b2" (UID: "4b5656b9-b6f8-4707-b988-b3bbc24986b2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.554678 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b5656b9-b6f8-4707-b988-b3bbc24986b2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4b5656b9-b6f8-4707-b988-b3bbc24986b2" (UID: "4b5656b9-b6f8-4707-b988-b3bbc24986b2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.564251 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b5656b9-b6f8-4707-b988-b3bbc24986b2-kube-api-access-66vq7" (OuterVolumeSpecName: "kube-api-access-66vq7") pod "4b5656b9-b6f8-4707-b988-b3bbc24986b2" (UID: "4b5656b9-b6f8-4707-b988-b3bbc24986b2"). InnerVolumeSpecName "kube-api-access-66vq7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.647702 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4b5656b9-b6f8-4707-b988-b3bbc24986b2-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.647744 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4b5656b9-b6f8-4707-b988-b3bbc24986b2-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.647758 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66vq7\" (UniqueName: \"kubernetes.io/projected/4b5656b9-b6f8-4707-b988-b3bbc24986b2-kube-api-access-66vq7\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.758598 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" event={"ID":"4b5656b9-b6f8-4707-b988-b3bbc24986b2","Type":"ContainerDied","Data":"42f39d3e7da28a7f258da3a903f828c5db1020f4ccf57dfcc07d35ed893b95d5"} Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.758648 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4" Sep 29 22:45:13 crc kubenswrapper[4922]: I0929 22:45:13.758658 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42f39d3e7da28a7f258da3a903f828c5db1020f4ccf57dfcc07d35ed893b95d5" Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.777632 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce38540-6796-48b5-82e7-aad30cf98841","Type":"ContainerStarted","Data":"f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d"} Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.778867 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"4a7323e3-8b0e-4f74-b0f4-73c5874fe361","Type":"ContainerStarted","Data":"2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97"} Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.780143 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nrr6k" event={"ID":"217b822b-44c6-465e-982a-23fa07d94b58","Type":"ContainerStarted","Data":"d32993d34e081cdec7334222d057489baf75c3258521cbb12f048b68fcbd008d"} Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.780521 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-nrr6k" Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.782948 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3","Type":"ContainerStarted","Data":"9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891"} Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.783084 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.785019 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerStarted","Data":"0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f"} Sep 29 22:45:14 crc 
kubenswrapper[4922]: I0929 22:45:14.803704 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-nrr6k" podStartSLOduration=16.815801071 podStartE2EDuration="20.803686255s" podCreationTimestamp="2025-09-29 22:44:54 +0000 UTC" firstStartedPulling="2025-09-29 22:45:10.30449861 +0000 UTC m=+1114.614787423" lastFinishedPulling="2025-09-29 22:45:14.292383794 +0000 UTC m=+1118.602672607" observedRunningTime="2025-09-29 22:45:14.799805137 +0000 UTC m=+1119.110093940" watchObservedRunningTime="2025-09-29 22:45:14.803686255 +0000 UTC m=+1119.113975068" Sep 29 22:45:14 crc kubenswrapper[4922]: I0929 22:45:14.846502 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=22.853513252 podStartE2EDuration="26.846474994s" podCreationTimestamp="2025-09-29 22:44:48 +0000 UTC" firstStartedPulling="2025-09-29 22:45:10.354028189 +0000 UTC m=+1114.664317002" lastFinishedPulling="2025-09-29 22:45:14.346989931 +0000 UTC m=+1118.657278744" observedRunningTime="2025-09-29 22:45:14.817868273 +0000 UTC m=+1119.128157096" watchObservedRunningTime="2025-09-29 22:45:14.846474994 +0000 UTC m=+1119.156763817" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.019364 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-vc9v7"] Sep 29 22:45:15 crc kubenswrapper[4922]: E0929 22:45:15.020064 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b5656b9-b6f8-4707-b988-b3bbc24986b2" containerName="collect-profiles" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.020079 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b5656b9-b6f8-4707-b988-b3bbc24986b2" containerName="collect-profiles" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.020229 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b5656b9-b6f8-4707-b988-b3bbc24986b2" containerName="collect-profiles" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.020764 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.023581 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.053823 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-vc9v7"] Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.175529 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovn-rundir\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.175678 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.175715 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnpgq\" (UniqueName: \"kubernetes.io/projected/0f689cf2-292c-47a9-936d-57954d187f5d-kube-api-access-hnpgq\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.175772 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f689cf2-292c-47a9-936d-57954d187f5d-config\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.175920 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovs-rundir\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.175951 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-combined-ca-bundle\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.182867 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rntq2"] Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.224236 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-jlcpt"] Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.225825 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.228720 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.239200 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-jlcpt"] Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.278962 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovs-rundir\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279005 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-combined-ca-bundle\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279038 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279071 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvqvf\" (UniqueName: \"kubernetes.io/projected/0858be02-26c9-4eff-8c31-1506a090122f-kube-api-access-wvqvf\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279095 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovn-rundir\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279138 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279162 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-config\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279183 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnpgq\" (UniqueName: \"kubernetes.io/projected/0f689cf2-292c-47a9-936d-57954d187f5d-kube-api-access-hnpgq\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " 
pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279202 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279223 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f689cf2-292c-47a9-936d-57954d187f5d-config\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.279967 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f689cf2-292c-47a9-936d-57954d187f5d-config\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.280272 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovn-rundir\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.280406 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovs-rundir\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.284980 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-combined-ca-bundle\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.285160 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.312004 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnpgq\" (UniqueName: \"kubernetes.io/projected/0f689cf2-292c-47a9-936d-57954d187f5d-kube-api-access-hnpgq\") pod \"ovn-controller-metrics-vc9v7\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.332375 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bcx8x"] Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.356415 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.377975 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-z4cb8"] Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.380954 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-config\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.381037 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.381146 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.381185 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvqvf\" (UniqueName: \"kubernetes.io/projected/0858be02-26c9-4eff-8c31-1506a090122f-kube-api-access-wvqvf\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.382698 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-config\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.383283 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.384161 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.387105 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.391494 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.421029 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-z4cb8"] Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.466959 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvqvf\" (UniqueName: \"kubernetes.io/projected/0858be02-26c9-4eff-8c31-1506a090122f-kube-api-access-wvqvf\") pod \"dnsmasq-dns-5bf47b49b7-jlcpt\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.482666 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqvvm\" (UniqueName: \"kubernetes.io/projected/c1063916-4458-4799-aa02-e4b53b5838e6-kube-api-access-kqvvm\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.482723 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.482786 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-dns-svc\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.482819 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.482857 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-config\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.559682 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.585321 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqvvm\" (UniqueName: \"kubernetes.io/projected/c1063916-4458-4799-aa02-e4b53b5838e6-kube-api-access-kqvvm\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.585369 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.585471 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-dns-svc\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.585502 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.585539 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-config\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.586220 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-config\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.586963 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.587526 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-dns-svc\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.588013 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 
22:45:15.612469 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqvvm\" (UniqueName: \"kubernetes.io/projected/c1063916-4458-4799-aa02-e4b53b5838e6-kube-api-access-kqvvm\") pod \"dnsmasq-dns-8554648995-z4cb8\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.732446 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.791966 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jfb7\" (UniqueName: \"kubernetes.io/projected/086d2221-5075-45af-8bfb-c7b1908f410d-kube-api-access-5jfb7\") pod \"086d2221-5075-45af-8bfb-c7b1908f410d\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.792134 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-dns-svc\") pod \"086d2221-5075-45af-8bfb-c7b1908f410d\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.792194 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-config\") pod \"086d2221-5075-45af-8bfb-c7b1908f410d\" (UID: \"086d2221-5075-45af-8bfb-c7b1908f410d\") " Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.793226 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-config" (OuterVolumeSpecName: "config") pod "086d2221-5075-45af-8bfb-c7b1908f410d" (UID: "086d2221-5075-45af-8bfb-c7b1908f410d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.795949 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "086d2221-5075-45af-8bfb-c7b1908f410d" (UID: "086d2221-5075-45af-8bfb-c7b1908f410d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.818833 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.858266 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" event={"ID":"086d2221-5075-45af-8bfb-c7b1908f410d","Type":"ContainerDied","Data":"2956e1698ce5edcc96a63c3f8ea85345dfe433f033aa15f4a3a1913c527e3859"} Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.858363 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-bcx8x" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.859577 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/086d2221-5075-45af-8bfb-c7b1908f410d-kube-api-access-5jfb7" (OuterVolumeSpecName: "kube-api-access-5jfb7") pod "086d2221-5075-45af-8bfb-c7b1908f410d" (UID: "086d2221-5075-45af-8bfb-c7b1908f410d"). InnerVolumeSpecName "kube-api-access-5jfb7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.889491 4922 generic.go:334] "Generic (PLEG): container finished" podID="4594a140-3321-4a34-ab35-65ad3560b085" containerID="0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f" exitCode=0 Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.891459 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerDied","Data":"0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f"} Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.893743 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jfb7\" (UniqueName: \"kubernetes.io/projected/086d2221-5075-45af-8bfb-c7b1908f410d-kube-api-access-5jfb7\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.893770 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.893781 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/086d2221-5075-45af-8bfb-c7b1908f410d-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.918468 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.994333 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-dns-svc\") pod \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.994859 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8fc7f380-d4f6-4713-953e-d97162e9e4e6" (UID: "8fc7f380-d4f6-4713-953e-d97162e9e4e6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.995605 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kbgg\" (UniqueName: \"kubernetes.io/projected/8fc7f380-d4f6-4713-953e-d97162e9e4e6-kube-api-access-5kbgg\") pod \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.995650 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-config\") pod \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\" (UID: \"8fc7f380-d4f6-4713-953e-d97162e9e4e6\") " Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.996080 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:15 crc kubenswrapper[4922]: I0929 22:45:15.999120 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-config" (OuterVolumeSpecName: "config") pod "8fc7f380-d4f6-4713-953e-d97162e9e4e6" (UID: "8fc7f380-d4f6-4713-953e-d97162e9e4e6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:15.999975 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fc7f380-d4f6-4713-953e-d97162e9e4e6-kube-api-access-5kbgg" (OuterVolumeSpecName: "kube-api-access-5kbgg") pod "8fc7f380-d4f6-4713-953e-d97162e9e4e6" (UID: "8fc7f380-d4f6-4713-953e-d97162e9e4e6"). InnerVolumeSpecName "kube-api-access-5kbgg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.020213 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bcx8x"] Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.028227 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-bcx8x"] Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.035059 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-vc9v7"] Sep 29 22:45:16 crc kubenswrapper[4922]: W0929 22:45:16.044139 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f689cf2_292c_47a9_936d_57954d187f5d.slice/crio-85f78abe9162ea5dece822f6e0983f34c387e430c739b4cbba9410e3ac7c7769 WatchSource:0}: Error finding container 85f78abe9162ea5dece822f6e0983f34c387e430c739b4cbba9410e3ac7c7769: Status 404 returned error can't find the container with id 85f78abe9162ea5dece822f6e0983f34c387e430c739b4cbba9410e3ac7c7769 Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.096947 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kbgg\" (UniqueName: \"kubernetes.io/projected/8fc7f380-d4f6-4713-953e-d97162e9e4e6-kube-api-access-5kbgg\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.097267 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fc7f380-d4f6-4713-953e-d97162e9e4e6-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.229553 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-jlcpt"] Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.437127 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="086d2221-5075-45af-8bfb-c7b1908f410d" path="/var/lib/kubelet/pods/086d2221-5075-45af-8bfb-c7b1908f410d/volumes" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.473957 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-z4cb8"] Sep 29 22:45:16 crc kubenswrapper[4922]: W0929 22:45:16.474854 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1063916_4458_4799_aa02_e4b53b5838e6.slice/crio-e888a9b6e466c357e7366ec03f32baee2839a491c93b3e264b0fca589963c900 WatchSource:0}: Error finding container e888a9b6e466c357e7366ec03f32baee2839a491c93b3e264b0fca589963c900: Status 404 returned error can't find the container with id e888a9b6e466c357e7366ec03f32baee2839a491c93b3e264b0fca589963c900 Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.898529 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" event={"ID":"8fc7f380-d4f6-4713-953e-d97162e9e4e6","Type":"ContainerDied","Data":"be66a688666ffff56cd5d116cc7b8aafcf13bf54dfecfb6d7f5bdf2b139f4797"} Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.899848 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-rntq2" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.904830 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerStarted","Data":"cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c"} Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.904867 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerStarted","Data":"2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a"} Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.905239 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.905664 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.906495 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-z4cb8" event={"ID":"c1063916-4458-4799-aa02-e4b53b5838e6","Type":"ContainerStarted","Data":"e888a9b6e466c357e7366ec03f32baee2839a491c93b3e264b0fca589963c900"} Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.907927 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vc9v7" event={"ID":"0f689cf2-292c-47a9-936d-57954d187f5d","Type":"ContainerStarted","Data":"85f78abe9162ea5dece822f6e0983f34c387e430c739b4cbba9410e3ac7c7769"} Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.909374 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" event={"ID":"0858be02-26c9-4eff-8c31-1506a090122f","Type":"ContainerStarted","Data":"0a1b1e72b4544274d81bba8182f663fe0546ac0a24d3ced9851842d1e63f38a7"} Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.963117 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rntq2"] Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.971647 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-rntq2"] Sep 29 22:45:16 crc kubenswrapper[4922]: I0929 22:45:16.982616 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-4jkkx" podStartSLOduration=19.138294336 podStartE2EDuration="22.98259849s" podCreationTimestamp="2025-09-29 22:44:54 +0000 UTC" firstStartedPulling="2025-09-29 22:45:10.445860334 +0000 UTC m=+1114.756149167" lastFinishedPulling="2025-09-29 22:45:14.290164508 +0000 UTC m=+1118.600453321" observedRunningTime="2025-09-29 22:45:16.963091798 +0000 UTC m=+1121.273380621" watchObservedRunningTime="2025-09-29 22:45:16.98259849 +0000 UTC m=+1121.292887303" Sep 29 22:45:17 crc kubenswrapper[4922]: I0929 22:45:17.921282 4922 generic.go:334] "Generic (PLEG): container finished" podID="c1063916-4458-4799-aa02-e4b53b5838e6" containerID="2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c" exitCode=0 Sep 29 22:45:17 crc kubenswrapper[4922]: I0929 22:45:17.921327 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-z4cb8" event={"ID":"c1063916-4458-4799-aa02-e4b53b5838e6","Type":"ContainerDied","Data":"2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c"} Sep 29 22:45:17 crc kubenswrapper[4922]: I0929 
22:45:17.924485 4922 generic.go:334] "Generic (PLEG): container finished" podID="0858be02-26c9-4eff-8c31-1506a090122f" containerID="2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01" exitCode=0 Sep 29 22:45:17 crc kubenswrapper[4922]: I0929 22:45:17.924546 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" event={"ID":"0858be02-26c9-4eff-8c31-1506a090122f","Type":"ContainerDied","Data":"2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01"} Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.437217 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fc7f380-d4f6-4713-953e-d97162e9e4e6" path="/var/lib/kubelet/pods/8fc7f380-d4f6-4713-953e-d97162e9e4e6/volumes" Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.942689 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce38540-6796-48b5-82e7-aad30cf98841","Type":"ContainerStarted","Data":"0cb28827e71d50623944eacc3ac38f3ced5298c3beaa26d7f6d6c9b64829ff1d"} Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.947925 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"4a7323e3-8b0e-4f74-b0f4-73c5874fe361","Type":"ContainerStarted","Data":"de3b093fd1fce51dc98ae3bc522017ff5ef8c0b5fedc8d245e4289a030aa5618"} Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.952353 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-z4cb8" event={"ID":"c1063916-4458-4799-aa02-e4b53b5838e6","Type":"ContainerStarted","Data":"86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b"} Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.952598 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.955686 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vc9v7" event={"ID":"0f689cf2-292c-47a9-936d-57954d187f5d","Type":"ContainerStarted","Data":"3db7f086a726daa10073df95a7a2a04b8afb9e0774984976907105555406f660"} Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.959521 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" event={"ID":"0858be02-26c9-4eff-8c31-1506a090122f","Type":"ContainerStarted","Data":"0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8"} Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.981060 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 22:45:18 crc kubenswrapper[4922]: I0929 22:45:18.982367 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=18.232782434 podStartE2EDuration="25.982335318s" podCreationTimestamp="2025-09-29 22:44:53 +0000 UTC" firstStartedPulling="2025-09-29 22:45:10.65034455 +0000 UTC m=+1114.960633363" lastFinishedPulling="2025-09-29 22:45:18.399897434 +0000 UTC m=+1122.710186247" observedRunningTime="2025-09-29 22:45:18.980238085 +0000 UTC m=+1123.290526978" watchObservedRunningTime="2025-09-29 22:45:18.982335318 +0000 UTC m=+1123.292624171" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.021600 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-z4cb8" podStartSLOduration=3.516255626 podStartE2EDuration="4.021569757s" 
podCreationTimestamp="2025-09-29 22:45:15 +0000 UTC" firstStartedPulling="2025-09-29 22:45:16.477272789 +0000 UTC m=+1120.787561602" lastFinishedPulling="2025-09-29 22:45:16.98258692 +0000 UTC m=+1121.292875733" observedRunningTime="2025-09-29 22:45:19.006764724 +0000 UTC m=+1123.317053577" watchObservedRunningTime="2025-09-29 22:45:19.021569757 +0000 UTC m=+1123.331858610" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.051663 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" podStartSLOduration=3.575193373 podStartE2EDuration="4.051629685s" podCreationTimestamp="2025-09-29 22:45:15 +0000 UTC" firstStartedPulling="2025-09-29 22:45:16.240644574 +0000 UTC m=+1120.550933387" lastFinishedPulling="2025-09-29 22:45:16.717080886 +0000 UTC m=+1121.027369699" observedRunningTime="2025-09-29 22:45:19.051565183 +0000 UTC m=+1123.361854036" watchObservedRunningTime="2025-09-29 22:45:19.051629685 +0000 UTC m=+1123.361918508" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.053143 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.084084 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-vc9v7" podStartSLOduration=2.756649792 podStartE2EDuration="5.084054192s" podCreationTimestamp="2025-09-29 22:45:14 +0000 UTC" firstStartedPulling="2025-09-29 22:45:16.046088658 +0000 UTC m=+1120.356377471" lastFinishedPulling="2025-09-29 22:45:18.373493058 +0000 UTC m=+1122.683781871" observedRunningTime="2025-09-29 22:45:19.074176713 +0000 UTC m=+1123.384465566" watchObservedRunningTime="2025-09-29 22:45:19.084054192 +0000 UTC m=+1123.394343045" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.122633 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.124326 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=18.234878216 podStartE2EDuration="26.124306177s" podCreationTimestamp="2025-09-29 22:44:53 +0000 UTC" firstStartedPulling="2025-09-29 22:45:10.550773129 +0000 UTC m=+1114.861061942" lastFinishedPulling="2025-09-29 22:45:18.44020105 +0000 UTC m=+1122.750489903" observedRunningTime="2025-09-29 22:45:19.10420177 +0000 UTC m=+1123.414490623" watchObservedRunningTime="2025-09-29 22:45:19.124306177 +0000 UTC m=+1123.434595040" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.141548 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.199234 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.969991 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.970040 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:19 crc kubenswrapper[4922]: I0929 22:45:19.970055 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.028850 4922 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.050196 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.402334 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.406680 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.408996 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.409167 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.409432 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.410969 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-x5mgr" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.434002 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.478505 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.478673 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jwtm\" (UniqueName: \"kubernetes.io/projected/014a5aba-d41a-4647-8459-c770534a4a60-kube-api-access-8jwtm\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.478825 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.478930 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/014a5aba-d41a-4647-8459-c770534a4a60-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.478960 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-config\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.479049 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-scripts\") pod \"ovn-northd-0\" (UID: 
\"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.479178 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.581856 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.581899 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jwtm\" (UniqueName: \"kubernetes.io/projected/014a5aba-d41a-4647-8459-c770534a4a60-kube-api-access-8jwtm\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.581936 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.581969 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/014a5aba-d41a-4647-8459-c770534a4a60-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.582066 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-config\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.582171 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-scripts\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.582259 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.582402 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/014a5aba-d41a-4647-8459-c770534a4a60-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.584703 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-config\") pod \"ovn-northd-0\" 
(UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.588286 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.588500 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.588541 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.589927 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-scripts\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.612749 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jwtm\" (UniqueName: \"kubernetes.io/projected/014a5aba-d41a-4647-8459-c770534a4a60-kube-api-access-8jwtm\") pod \"ovn-northd-0\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " pod="openstack/ovn-northd-0" Sep 29 22:45:20 crc kubenswrapper[4922]: I0929 22:45:20.733199 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 22:45:21 crc kubenswrapper[4922]: I0929 22:45:21.001650 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 22:45:21 crc kubenswrapper[4922]: I0929 22:45:21.995725 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"014a5aba-d41a-4647-8459-c770534a4a60","Type":"ContainerStarted","Data":"87119ad2e8555685bf1f7f2fab85f97375a3a9545140b405f20634d11571f857"} Sep 29 22:45:23 crc kubenswrapper[4922]: I0929 22:45:23.009074 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"014a5aba-d41a-4647-8459-c770534a4a60","Type":"ContainerStarted","Data":"18271197116a64d48cb8446e8bb69a2a9e1aea53d596b37826cbb2a61e257443"} Sep 29 22:45:23 crc kubenswrapper[4922]: I0929 22:45:23.009853 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 29 22:45:23 crc kubenswrapper[4922]: I0929 22:45:23.009889 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"014a5aba-d41a-4647-8459-c770534a4a60","Type":"ContainerStarted","Data":"372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5"} Sep 29 22:45:23 crc kubenswrapper[4922]: I0929 22:45:23.052070 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.001285002 podStartE2EDuration="3.052044735s" podCreationTimestamp="2025-09-29 22:45:20 +0000 UTC" firstStartedPulling="2025-09-29 22:45:21.010163654 +0000 UTC m=+1125.320452467" lastFinishedPulling="2025-09-29 22:45:22.060923377 +0000 UTC m=+1126.371212200" observedRunningTime="2025-09-29 22:45:23.036700268 +0000 UTC m=+1127.346989161" watchObservedRunningTime="2025-09-29 22:45:23.052044735 +0000 UTC m=+1127.362333578" Sep 29 22:45:24 crc kubenswrapper[4922]: I0929 22:45:24.024474 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"866ac5e5-219a-4afa-b6b3-0ca293c81f1d","Type":"ContainerStarted","Data":"b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5"} Sep 29 22:45:25 crc kubenswrapper[4922]: I0929 22:45:25.041795 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"aa85a019-83a6-4b71-abdb-7144be0105ae","Type":"ContainerStarted","Data":"d4fadda570345bb9aa23bb32aebb1fcd7e4a48ba19bc93e554b2daf36cc8b8d3"} Sep 29 22:45:25 crc kubenswrapper[4922]: I0929 22:45:25.561899 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:25 crc kubenswrapper[4922]: I0929 22:45:25.821625 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:25 crc kubenswrapper[4922]: I0929 22:45:25.919016 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-jlcpt"] Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.048196 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" podUID="0858be02-26c9-4eff-8c31-1506a090122f" containerName="dnsmasq-dns" containerID="cri-o://0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8" gracePeriod=10 Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.428783 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.505773 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-dns-svc\") pod \"0858be02-26c9-4eff-8c31-1506a090122f\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.505860 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvqvf\" (UniqueName: \"kubernetes.io/projected/0858be02-26c9-4eff-8c31-1506a090122f-kube-api-access-wvqvf\") pod \"0858be02-26c9-4eff-8c31-1506a090122f\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.505931 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-config\") pod \"0858be02-26c9-4eff-8c31-1506a090122f\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.505976 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-ovsdbserver-nb\") pod \"0858be02-26c9-4eff-8c31-1506a090122f\" (UID: \"0858be02-26c9-4eff-8c31-1506a090122f\") " Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.513682 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0858be02-26c9-4eff-8c31-1506a090122f-kube-api-access-wvqvf" (OuterVolumeSpecName: "kube-api-access-wvqvf") pod "0858be02-26c9-4eff-8c31-1506a090122f" (UID: "0858be02-26c9-4eff-8c31-1506a090122f"). InnerVolumeSpecName "kube-api-access-wvqvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.543845 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0858be02-26c9-4eff-8c31-1506a090122f" (UID: "0858be02-26c9-4eff-8c31-1506a090122f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.562772 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0858be02-26c9-4eff-8c31-1506a090122f" (UID: "0858be02-26c9-4eff-8c31-1506a090122f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.566534 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-config" (OuterVolumeSpecName: "config") pod "0858be02-26c9-4eff-8c31-1506a090122f" (UID: "0858be02-26c9-4eff-8c31-1506a090122f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.608041 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.608068 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvqvf\" (UniqueName: \"kubernetes.io/projected/0858be02-26c9-4eff-8c31-1506a090122f-kube-api-access-wvqvf\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.608079 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:26 crc kubenswrapper[4922]: I0929 22:45:26.608088 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0858be02-26c9-4eff-8c31-1506a090122f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.057627 4922 generic.go:334] "Generic (PLEG): container finished" podID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerID="b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5" exitCode=0 Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.057718 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"866ac5e5-219a-4afa-b6b3-0ca293c81f1d","Type":"ContainerDied","Data":"b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5"} Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.060129 4922 generic.go:334] "Generic (PLEG): container finished" podID="0858be02-26c9-4eff-8c31-1506a090122f" containerID="0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8" exitCode=0 Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.060181 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" event={"ID":"0858be02-26c9-4eff-8c31-1506a090122f","Type":"ContainerDied","Data":"0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8"} Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.060201 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" event={"ID":"0858be02-26c9-4eff-8c31-1506a090122f","Type":"ContainerDied","Data":"0a1b1e72b4544274d81bba8182f663fe0546ac0a24d3ced9851842d1e63f38a7"} Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.060225 4922 scope.go:117] "RemoveContainer" containerID="0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.060434 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-jlcpt" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.067429 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1654e799-40ef-413a-8324-bb5b4f7a8f17","Type":"ContainerStarted","Data":"e253e117e33e1551c4b4d444e0f50023636461d456d2027cdac35eb3aeb6c536"} Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.067696 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.123383 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.243429301 podStartE2EDuration="40.123362072s" podCreationTimestamp="2025-09-29 22:44:47 +0000 UTC" firstStartedPulling="2025-09-29 22:44:48.107540202 +0000 UTC m=+1092.417829015" lastFinishedPulling="2025-09-29 22:45:25.987472953 +0000 UTC m=+1130.297761786" observedRunningTime="2025-09-29 22:45:27.11774617 +0000 UTC m=+1131.428035003" watchObservedRunningTime="2025-09-29 22:45:27.123362072 +0000 UTC m=+1131.433650895" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.166081 4922 scope.go:117] "RemoveContainer" containerID="2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.203058 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-jlcpt"] Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.208819 4922 scope.go:117] "RemoveContainer" containerID="0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8" Sep 29 22:45:27 crc kubenswrapper[4922]: E0929 22:45:27.209310 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8\": container with ID starting with 0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8 not found: ID does not exist" containerID="0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.209348 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8"} err="failed to get container status \"0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8\": rpc error: code = NotFound desc = could not find container \"0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8\": container with ID starting with 0521e8ea7239f2493e20737bf2f981bdf3c5280e795c50eaf1f910f4a29de3e8 not found: ID does not exist" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.209375 4922 scope.go:117] "RemoveContainer" containerID="2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01" Sep 29 22:45:27 crc kubenswrapper[4922]: E0929 22:45:27.209711 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01\": container with ID starting with 2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01 not found: ID does not exist" containerID="2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.209733 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01"} err="failed to get container status \"2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01\": rpc error: code = NotFound desc = could not find container \"2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01\": container with ID starting with 2f3a361362f4e89cf9124fe34027eb2b822f9e7abe350f81f1c87faae037ff01 not found: ID does not exist" Sep 29 22:45:27 crc kubenswrapper[4922]: I0929 22:45:27.210270 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-jlcpt"] Sep 29 22:45:28 crc kubenswrapper[4922]: I0929 22:45:28.078182 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"866ac5e5-219a-4afa-b6b3-0ca293c81f1d","Type":"ContainerStarted","Data":"c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa"} Sep 29 22:45:28 crc kubenswrapper[4922]: I0929 22:45:28.113778 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.528169245 podStartE2EDuration="44.113750892s" podCreationTimestamp="2025-09-29 22:44:44 +0000 UTC" firstStartedPulling="2025-09-29 22:44:46.390049241 +0000 UTC m=+1090.700338054" lastFinishedPulling="2025-09-29 22:45:22.975630848 +0000 UTC m=+1127.285919701" observedRunningTime="2025-09-29 22:45:28.109803463 +0000 UTC m=+1132.420092276" watchObservedRunningTime="2025-09-29 22:45:28.113750892 +0000 UTC m=+1132.424039735" Sep 29 22:45:28 crc kubenswrapper[4922]: I0929 22:45:28.434783 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0858be02-26c9-4eff-8c31-1506a090122f" path="/var/lib/kubelet/pods/0858be02-26c9-4eff-8c31-1506a090122f/volumes" Sep 29 22:45:29 crc kubenswrapper[4922]: I0929 22:45:29.096263 4922 generic.go:334] "Generic (PLEG): container finished" podID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerID="d4fadda570345bb9aa23bb32aebb1fcd7e4a48ba19bc93e554b2daf36cc8b8d3" exitCode=0 Sep 29 22:45:29 crc kubenswrapper[4922]: I0929 22:45:29.096321 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"aa85a019-83a6-4b71-abdb-7144be0105ae","Type":"ContainerDied","Data":"d4fadda570345bb9aa23bb32aebb1fcd7e4a48ba19bc93e554b2daf36cc8b8d3"} Sep 29 22:45:30 crc kubenswrapper[4922]: I0929 22:45:30.110037 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"aa85a019-83a6-4b71-abdb-7144be0105ae","Type":"ContainerStarted","Data":"db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59"} Sep 29 22:45:30 crc kubenswrapper[4922]: I0929 22:45:30.154370 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371991.700441 podStartE2EDuration="45.15433391s" podCreationTimestamp="2025-09-29 22:44:45 +0000 UTC" firstStartedPulling="2025-09-29 22:44:47.639890412 +0000 UTC m=+1091.950179225" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:45:30.141133157 +0000 UTC m=+1134.451421980" watchObservedRunningTime="2025-09-29 22:45:30.15433391 +0000 UTC m=+1134.464622773" Sep 29 22:45:32 crc kubenswrapper[4922]: I0929 22:45:32.494642 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 22:45:35 crc kubenswrapper[4922]: I0929 22:45:35.664684 4922 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 22:45:35 crc kubenswrapper[4922]: I0929 22:45:35.665302 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 22:45:35 crc kubenswrapper[4922]: I0929 22:45:35.755790 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 22:45:35 crc kubenswrapper[4922]: I0929 22:45:35.844652 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 29 22:45:36 crc kubenswrapper[4922]: I0929 22:45:36.244479 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.015686 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.016026 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.082702 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.096077 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-wfl9h"] Sep 29 22:45:37 crc kubenswrapper[4922]: E0929 22:45:37.096474 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0858be02-26c9-4eff-8c31-1506a090122f" containerName="init" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.096492 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0858be02-26c9-4eff-8c31-1506a090122f" containerName="init" Sep 29 22:45:37 crc kubenswrapper[4922]: E0929 22:45:37.096519 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0858be02-26c9-4eff-8c31-1506a090122f" containerName="dnsmasq-dns" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.096525 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0858be02-26c9-4eff-8c31-1506a090122f" containerName="dnsmasq-dns" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.096692 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0858be02-26c9-4eff-8c31-1506a090122f" containerName="dnsmasq-dns" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.097246 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wfl9h" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.101151 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wfl9h"] Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.217422 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.218520 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72xsj\" (UniqueName: \"kubernetes.io/projected/d094da53-d934-420c-9ea8-501e62d38d30-kube-api-access-72xsj\") pod \"keystone-db-create-wfl9h\" (UID: \"d094da53-d934-420c-9ea8-501e62d38d30\") " pod="openstack/keystone-db-create-wfl9h" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.319876 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9hwh7"] Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.320196 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72xsj\" (UniqueName: \"kubernetes.io/projected/d094da53-d934-420c-9ea8-501e62d38d30-kube-api-access-72xsj\") pod \"keystone-db-create-wfl9h\" (UID: \"d094da53-d934-420c-9ea8-501e62d38d30\") " pod="openstack/keystone-db-create-wfl9h" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.323203 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9hwh7" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.335722 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9hwh7"] Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.338170 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72xsj\" (UniqueName: \"kubernetes.io/projected/d094da53-d934-420c-9ea8-501e62d38d30-kube-api-access-72xsj\") pod \"keystone-db-create-wfl9h\" (UID: \"d094da53-d934-420c-9ea8-501e62d38d30\") " pod="openstack/keystone-db-create-wfl9h" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.413502 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wfl9h" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.421171 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dqr6\" (UniqueName: \"kubernetes.io/projected/1fdfa237-2999-45c5-a008-0e99fa6f479f-kube-api-access-9dqr6\") pod \"placement-db-create-9hwh7\" (UID: \"1fdfa237-2999-45c5-a008-0e99fa6f479f\") " pod="openstack/placement-db-create-9hwh7" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.522666 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dqr6\" (UniqueName: \"kubernetes.io/projected/1fdfa237-2999-45c5-a008-0e99fa6f479f-kube-api-access-9dqr6\") pod \"placement-db-create-9hwh7\" (UID: \"1fdfa237-2999-45c5-a008-0e99fa6f479f\") " pod="openstack/placement-db-create-9hwh7" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.556710 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dqr6\" (UniqueName: \"kubernetes.io/projected/1fdfa237-2999-45c5-a008-0e99fa6f479f-kube-api-access-9dqr6\") pod \"placement-db-create-9hwh7\" (UID: \"1fdfa237-2999-45c5-a008-0e99fa6f479f\") " pod="openstack/placement-db-create-9hwh7" Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.617022 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wfl9h"] Sep 29 22:45:37 crc kubenswrapper[4922]: I0929 22:45:37.638539 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9hwh7" Sep 29 22:45:38 crc kubenswrapper[4922]: W0929 22:45:38.143811 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fdfa237_2999_45c5_a008_0e99fa6f479f.slice/crio-a24dcbcf83bd572f4a26e9a06f720721a09a108c741014026d926ec3cd2f8b96 WatchSource:0}: Error finding container a24dcbcf83bd572f4a26e9a06f720721a09a108c741014026d926ec3cd2f8b96: Status 404 returned error can't find the container with id a24dcbcf83bd572f4a26e9a06f720721a09a108c741014026d926ec3cd2f8b96 Sep 29 22:45:38 crc kubenswrapper[4922]: I0929 22:45:38.146623 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9hwh7"] Sep 29 22:45:38 crc kubenswrapper[4922]: I0929 22:45:38.201780 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9hwh7" event={"ID":"1fdfa237-2999-45c5-a008-0e99fa6f479f","Type":"ContainerStarted","Data":"a24dcbcf83bd572f4a26e9a06f720721a09a108c741014026d926ec3cd2f8b96"} Sep 29 22:45:38 crc kubenswrapper[4922]: I0929 22:45:38.210162 4922 generic.go:334] "Generic (PLEG): container finished" podID="d094da53-d934-420c-9ea8-501e62d38d30" containerID="5dee3f93cfad466ddb0c5b6ec802c83864fdd84bc637dd69a34af508bfc25b2d" exitCode=0 Sep 29 22:45:38 crc kubenswrapper[4922]: I0929 22:45:38.210288 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wfl9h" event={"ID":"d094da53-d934-420c-9ea8-501e62d38d30","Type":"ContainerDied","Data":"5dee3f93cfad466ddb0c5b6ec802c83864fdd84bc637dd69a34af508bfc25b2d"} Sep 29 22:45:38 crc kubenswrapper[4922]: I0929 22:45:38.210379 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wfl9h" event={"ID":"d094da53-d934-420c-9ea8-501e62d38d30","Type":"ContainerStarted","Data":"13d303c4823cb63ac3b90a68c1265ab5a865319e6f1ae3031d088d97c22fd5df"} Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 
22:45:39.147474 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-llwhb"] Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.148918 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.172339 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-llwhb"] Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.220880 4922 generic.go:334] "Generic (PLEG): container finished" podID="1fdfa237-2999-45c5-a008-0e99fa6f479f" containerID="0f7068f056c410853b677deba5127eb7577a738fac825eb4b239d2f1371200c6" exitCode=0 Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.221151 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9hwh7" event={"ID":"1fdfa237-2999-45c5-a008-0e99fa6f479f","Type":"ContainerDied","Data":"0f7068f056c410853b677deba5127eb7577a738fac825eb4b239d2f1371200c6"} Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.254883 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-config\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.255077 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.255144 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.257864 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.257903 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pct25\" (UniqueName: \"kubernetes.io/projected/c672b20c-8486-4def-9b55-6907518cb710-kube-api-access-pct25\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.359848 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-config\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.359962 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.360007 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.360054 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.360080 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pct25\" (UniqueName: \"kubernetes.io/projected/c672b20c-8486-4def-9b55-6907518cb710-kube-api-access-pct25\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.360815 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-config\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.360849 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.360941 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.360996 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.382188 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pct25\" (UniqueName: \"kubernetes.io/projected/c672b20c-8486-4def-9b55-6907518cb710-kube-api-access-pct25\") pod \"dnsmasq-dns-b8fbc5445-llwhb\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.463852 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.553054 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wfl9h" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.665740 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72xsj\" (UniqueName: \"kubernetes.io/projected/d094da53-d934-420c-9ea8-501e62d38d30-kube-api-access-72xsj\") pod \"d094da53-d934-420c-9ea8-501e62d38d30\" (UID: \"d094da53-d934-420c-9ea8-501e62d38d30\") " Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.673573 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d094da53-d934-420c-9ea8-501e62d38d30-kube-api-access-72xsj" (OuterVolumeSpecName: "kube-api-access-72xsj") pod "d094da53-d934-420c-9ea8-501e62d38d30" (UID: "d094da53-d934-420c-9ea8-501e62d38d30"). InnerVolumeSpecName "kube-api-access-72xsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.767826 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72xsj\" (UniqueName: \"kubernetes.io/projected/d094da53-d934-420c-9ea8-501e62d38d30-kube-api-access-72xsj\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:39 crc kubenswrapper[4922]: I0929 22:45:39.927872 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-llwhb"] Sep 29 22:45:39 crc kubenswrapper[4922]: W0929 22:45:39.934946 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc672b20c_8486_4def_9b55_6907518cb710.slice/crio-1c77d4828900bef83d607978bda7e94dc7949bc51f42fba1760a536c2d436d7d WatchSource:0}: Error finding container 1c77d4828900bef83d607978bda7e94dc7949bc51f42fba1760a536c2d436d7d: Status 404 returned error can't find the container with id 1c77d4828900bef83d607978bda7e94dc7949bc51f42fba1760a536c2d436d7d Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.228626 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wfl9h" event={"ID":"d094da53-d934-420c-9ea8-501e62d38d30","Type":"ContainerDied","Data":"13d303c4823cb63ac3b90a68c1265ab5a865319e6f1ae3031d088d97c22fd5df"} Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.228901 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13d303c4823cb63ac3b90a68c1265ab5a865319e6f1ae3031d088d97c22fd5df" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.228642 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-wfl9h" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.229990 4922 generic.go:334] "Generic (PLEG): container finished" podID="c672b20c-8486-4def-9b55-6907518cb710" containerID="d89cb411a6e68c912f20d4960314e51355cf8f2cea34465d64222d459952e11e" exitCode=0 Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.230053 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" event={"ID":"c672b20c-8486-4def-9b55-6907518cb710","Type":"ContainerDied","Data":"d89cb411a6e68c912f20d4960314e51355cf8f2cea34465d64222d459952e11e"} Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.230099 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" event={"ID":"c672b20c-8486-4def-9b55-6907518cb710","Type":"ContainerStarted","Data":"1c77d4828900bef83d607978bda7e94dc7949bc51f42fba1760a536c2d436d7d"} Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.259375 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 29 22:45:40 crc kubenswrapper[4922]: E0929 22:45:40.260121 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d094da53-d934-420c-9ea8-501e62d38d30" containerName="mariadb-database-create" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.260136 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d094da53-d934-420c-9ea8-501e62d38d30" containerName="mariadb-database-create" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.260297 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d094da53-d934-420c-9ea8-501e62d38d30" containerName="mariadb-database-create" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.269156 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.271854 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.272244 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.273378 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-fdmhb" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.273707 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.294035 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.376750 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.376795 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-lock\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.376929 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.377008 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-cache\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.377084 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zjbb\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-kube-api-access-7zjbb\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.480114 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.481079 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-lock\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.481191 4922 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.481229 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-cache\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.481282 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zjbb\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-kube-api-access-7zjbb\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: E0929 22:45:40.480800 4922 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 22:45:40 crc kubenswrapper[4922]: E0929 22:45:40.481964 4922 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 22:45:40 crc kubenswrapper[4922]: E0929 22:45:40.482013 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift podName:e0b8c219-afd8-41e5-a9d7-686c7b70fd70 nodeName:}" failed. No retries permitted until 2025-09-29 22:45:40.981996064 +0000 UTC m=+1145.292284877 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift") pod "swift-storage-0" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70") : configmap "swift-ring-files" not found Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.482312 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.482440 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-lock\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.482787 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-cache\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.499513 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zjbb\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-kube-api-access-7zjbb\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.511944 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.652459 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9hwh7" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.684525 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dqr6\" (UniqueName: \"kubernetes.io/projected/1fdfa237-2999-45c5-a008-0e99fa6f479f-kube-api-access-9dqr6\") pod \"1fdfa237-2999-45c5-a008-0e99fa6f479f\" (UID: \"1fdfa237-2999-45c5-a008-0e99fa6f479f\") " Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.690601 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fdfa237-2999-45c5-a008-0e99fa6f479f-kube-api-access-9dqr6" (OuterVolumeSpecName: "kube-api-access-9dqr6") pod "1fdfa237-2999-45c5-a008-0e99fa6f479f" (UID: "1fdfa237-2999-45c5-a008-0e99fa6f479f"). InnerVolumeSpecName "kube-api-access-9dqr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.786278 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dqr6\" (UniqueName: \"kubernetes.io/projected/1fdfa237-2999-45c5-a008-0e99fa6f479f-kube-api-access-9dqr6\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:40 crc kubenswrapper[4922]: I0929 22:45:40.990122 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:40 crc kubenswrapper[4922]: E0929 22:45:40.990448 4922 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 22:45:40 crc kubenswrapper[4922]: E0929 22:45:40.990841 4922 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 22:45:40 crc kubenswrapper[4922]: E0929 22:45:40.990936 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift podName:e0b8c219-afd8-41e5-a9d7-686c7b70fd70 nodeName:}" failed. No retries permitted until 2025-09-29 22:45:41.990907675 +0000 UTC m=+1146.301196528 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift") pod "swift-storage-0" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70") : configmap "swift-ring-files" not found Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.244280 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" event={"ID":"c672b20c-8486-4def-9b55-6907518cb710","Type":"ContainerStarted","Data":"fd728657c94eab24147d3c2acb47f39b080b44c782f519f650b2d82010f8c22d"} Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.244381 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.246283 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9hwh7" event={"ID":"1fdfa237-2999-45c5-a008-0e99fa6f479f","Type":"ContainerDied","Data":"a24dcbcf83bd572f4a26e9a06f720721a09a108c741014026d926ec3cd2f8b96"} Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.246323 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a24dcbcf83bd572f4a26e9a06f720721a09a108c741014026d926ec3cd2f8b96" Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.246381 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9hwh7" Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.248516 4922 generic.go:334] "Generic (PLEG): container finished" podID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerID="34658a45d429ee4156c92b9c0c2d869fe7dd616fe8bb3f832f80da1bc9e277b3" exitCode=0 Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.248638 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e56d31de-64f5-42a7-8243-7ac6d992a03d","Type":"ContainerDied","Data":"34658a45d429ee4156c92b9c0c2d869fe7dd616fe8bb3f832f80da1bc9e277b3"} Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.251005 4922 generic.go:334] "Generic (PLEG): container finished" podID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerID="7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f" exitCode=0 Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.251089 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cab5f5be-6bdd-481b-a07b-08491f6f2be5","Type":"ContainerDied","Data":"7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f"} Sep 29 22:45:41 crc kubenswrapper[4922]: I0929 22:45:41.284349 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" podStartSLOduration=2.284323863 podStartE2EDuration="2.284323863s" podCreationTimestamp="2025-09-29 22:45:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:45:41.273189142 +0000 UTC m=+1145.583477965" watchObservedRunningTime="2025-09-29 22:45:41.284323863 +0000 UTC m=+1145.594612706" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.008331 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:42 crc kubenswrapper[4922]: E0929 
22:45:42.008594 4922 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 22:45:42 crc kubenswrapper[4922]: E0929 22:45:42.008626 4922 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 22:45:42 crc kubenswrapper[4922]: E0929 22:45:42.008706 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift podName:e0b8c219-afd8-41e5-a9d7-686c7b70fd70 nodeName:}" failed. No retries permitted until 2025-09-29 22:45:44.008682956 +0000 UTC m=+1148.318971809 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift") pod "swift-storage-0" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70") : configmap "swift-ring-files" not found Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.260651 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e56d31de-64f5-42a7-8243-7ac6d992a03d","Type":"ContainerStarted","Data":"b6074f8dda50ed5b4ce98889541af126fde4d515d920458ec2ced51aad77f19d"} Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.260936 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.265503 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cab5f5be-6bdd-481b-a07b-08491f6f2be5","Type":"ContainerStarted","Data":"36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd"} Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.291137 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371976.563656 podStartE2EDuration="1m0.291119697s" podCreationTimestamp="2025-09-29 22:44:42 +0000 UTC" firstStartedPulling="2025-09-29 22:44:44.127942187 +0000 UTC m=+1088.438231000" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:45:42.285990658 +0000 UTC m=+1146.596279511" watchObservedRunningTime="2025-09-29 22:45:42.291119697 +0000 UTC m=+1146.601408520" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.321300 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.036175472 podStartE2EDuration="1m0.321278367s" podCreationTimestamp="2025-09-29 22:44:42 +0000 UTC" firstStartedPulling="2025-09-29 22:44:44.760416093 +0000 UTC m=+1089.070704906" lastFinishedPulling="2025-09-29 22:45:09.045518978 +0000 UTC m=+1113.355807801" observedRunningTime="2025-09-29 22:45:42.313539912 +0000 UTC m=+1146.623828725" watchObservedRunningTime="2025-09-29 22:45:42.321278367 +0000 UTC m=+1146.631567200" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.741653 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-sxbsj"] Sep 29 22:45:42 crc kubenswrapper[4922]: E0929 22:45:42.741947 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fdfa237-2999-45c5-a008-0e99fa6f479f" containerName="mariadb-database-create" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.741968 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fdfa237-2999-45c5-a008-0e99fa6f479f" containerName="mariadb-database-create" Sep 29 22:45:42 crc 
kubenswrapper[4922]: I0929 22:45:42.742125 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fdfa237-2999-45c5-a008-0e99fa6f479f" containerName="mariadb-database-create" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.742628 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sxbsj" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.761406 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sxbsj"] Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.822435 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h66l7\" (UniqueName: \"kubernetes.io/projected/f219fe95-4c9e-46af-b01c-f39503f1ca4e-kube-api-access-h66l7\") pod \"glance-db-create-sxbsj\" (UID: \"f219fe95-4c9e-46af-b01c-f39503f1ca4e\") " pod="openstack/glance-db-create-sxbsj" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.924284 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h66l7\" (UniqueName: \"kubernetes.io/projected/f219fe95-4c9e-46af-b01c-f39503f1ca4e-kube-api-access-h66l7\") pod \"glance-db-create-sxbsj\" (UID: \"f219fe95-4c9e-46af-b01c-f39503f1ca4e\") " pod="openstack/glance-db-create-sxbsj" Sep 29 22:45:42 crc kubenswrapper[4922]: I0929 22:45:42.944746 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h66l7\" (UniqueName: \"kubernetes.io/projected/f219fe95-4c9e-46af-b01c-f39503f1ca4e-kube-api-access-h66l7\") pod \"glance-db-create-sxbsj\" (UID: \"f219fe95-4c9e-46af-b01c-f39503f1ca4e\") " pod="openstack/glance-db-create-sxbsj" Sep 29 22:45:43 crc kubenswrapper[4922]: I0929 22:45:43.063090 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sxbsj" Sep 29 22:45:43 crc kubenswrapper[4922]: I0929 22:45:43.543956 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sxbsj"] Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.044973 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:44 crc kubenswrapper[4922]: E0929 22:45:44.045278 4922 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 22:45:44 crc kubenswrapper[4922]: E0929 22:45:44.045328 4922 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 22:45:44 crc kubenswrapper[4922]: E0929 22:45:44.045455 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift podName:e0b8c219-afd8-41e5-a9d7-686c7b70fd70 nodeName:}" failed. No retries permitted until 2025-09-29 22:45:48.045425527 +0000 UTC m=+1152.355714370 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift") pod "swift-storage-0" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70") : configmap "swift-ring-files" not found Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.210434 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.239218 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-9lrwb"] Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.240909 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.243971 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.244185 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.245451 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.258332 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-9lrwb"] Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.290223 4922 generic.go:334] "Generic (PLEG): container finished" podID="f219fe95-4c9e-46af-b01c-f39503f1ca4e" containerID="73abdf3beef709c2dc64e99c4195dc633f8c2a0e3781acbe2e75749f2f9eeffc" exitCode=0 Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.290282 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sxbsj" event={"ID":"f219fe95-4c9e-46af-b01c-f39503f1ca4e","Type":"ContainerDied","Data":"73abdf3beef709c2dc64e99c4195dc633f8c2a0e3781acbe2e75749f2f9eeffc"} Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.290322 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sxbsj" event={"ID":"f219fe95-4c9e-46af-b01c-f39503f1ca4e","Type":"ContainerStarted","Data":"ef4a8db77cf813f604059a8c1e01e76d3a7b1a538b32c954b265d8005eaf3a7a"} Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.351921 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-scripts\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.351991 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-etc-swift\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.352164 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-ring-data-devices\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 
22:45:44.352306 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-dispersionconf\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.352446 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lxpr\" (UniqueName: \"kubernetes.io/projected/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-kube-api-access-2lxpr\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.352617 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-combined-ca-bundle\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.352649 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-swiftconf\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.454072 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-scripts\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.454148 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-etc-swift\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.454206 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-ring-data-devices\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.454278 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-dispersionconf\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.454336 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lxpr\" (UniqueName: \"kubernetes.io/projected/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-kube-api-access-2lxpr\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.454468 
4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-combined-ca-bundle\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.454517 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-swiftconf\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.455121 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-etc-swift\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.455175 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-scripts\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.455762 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-ring-data-devices\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.462864 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-swiftconf\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.463149 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-dispersionconf\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.463983 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-combined-ca-bundle\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.479121 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lxpr\" (UniqueName: \"kubernetes.io/projected/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-kube-api-access-2lxpr\") pod \"swift-ring-rebalance-9lrwb\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.559485 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.968571 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-nrr6k" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" probeResult="failure" output=< Sep 29 22:45:44 crc kubenswrapper[4922]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 22:45:44 crc kubenswrapper[4922]: > Sep 29 22:45:44 crc kubenswrapper[4922]: I0929 22:45:44.998246 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-9lrwb"] Sep 29 22:45:45 crc kubenswrapper[4922]: I0929 22:45:45.315576 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9lrwb" event={"ID":"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0","Type":"ContainerStarted","Data":"6429b43056a992a2cffdcd9eddb0cb44a87d6e9b791d24d49a0071a169fc6050"} Sep 29 22:45:45 crc kubenswrapper[4922]: I0929 22:45:45.736826 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sxbsj" Sep 29 22:45:45 crc kubenswrapper[4922]: I0929 22:45:45.889736 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h66l7\" (UniqueName: \"kubernetes.io/projected/f219fe95-4c9e-46af-b01c-f39503f1ca4e-kube-api-access-h66l7\") pod \"f219fe95-4c9e-46af-b01c-f39503f1ca4e\" (UID: \"f219fe95-4c9e-46af-b01c-f39503f1ca4e\") " Sep 29 22:45:45 crc kubenswrapper[4922]: I0929 22:45:45.911044 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f219fe95-4c9e-46af-b01c-f39503f1ca4e-kube-api-access-h66l7" (OuterVolumeSpecName: "kube-api-access-h66l7") pod "f219fe95-4c9e-46af-b01c-f39503f1ca4e" (UID: "f219fe95-4c9e-46af-b01c-f39503f1ca4e"). InnerVolumeSpecName "kube-api-access-h66l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:45 crc kubenswrapper[4922]: I0929 22:45:45.991473 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h66l7\" (UniqueName: \"kubernetes.io/projected/f219fe95-4c9e-46af-b01c-f39503f1ca4e-kube-api-access-h66l7\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:46 crc kubenswrapper[4922]: I0929 22:45:46.324201 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sxbsj" event={"ID":"f219fe95-4c9e-46af-b01c-f39503f1ca4e","Type":"ContainerDied","Data":"ef4a8db77cf813f604059a8c1e01e76d3a7b1a538b32c954b265d8005eaf3a7a"} Sep 29 22:45:46 crc kubenswrapper[4922]: I0929 22:45:46.324490 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef4a8db77cf813f604059a8c1e01e76d3a7b1a538b32c954b265d8005eaf3a7a" Sep 29 22:45:46 crc kubenswrapper[4922]: I0929 22:45:46.324284 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-sxbsj" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.131953 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-dc06-account-create-ls6hh"] Sep 29 22:45:47 crc kubenswrapper[4922]: E0929 22:45:47.132353 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f219fe95-4c9e-46af-b01c-f39503f1ca4e" containerName="mariadb-database-create" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.132369 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f219fe95-4c9e-46af-b01c-f39503f1ca4e" containerName="mariadb-database-create" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.132588 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f219fe95-4c9e-46af-b01c-f39503f1ca4e" containerName="mariadb-database-create" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.133167 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-dc06-account-create-ls6hh" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.134657 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.141801 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-dc06-account-create-ls6hh"] Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.213446 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbv7w\" (UniqueName: \"kubernetes.io/projected/71a94cd2-8bcf-4026-8668-1c55d6e1a8a7-kube-api-access-nbv7w\") pod \"keystone-dc06-account-create-ls6hh\" (UID: \"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7\") " pod="openstack/keystone-dc06-account-create-ls6hh" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.315412 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbv7w\" (UniqueName: \"kubernetes.io/projected/71a94cd2-8bcf-4026-8668-1c55d6e1a8a7-kube-api-access-nbv7w\") pod \"keystone-dc06-account-create-ls6hh\" (UID: \"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7\") " pod="openstack/keystone-dc06-account-create-ls6hh" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.337569 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbv7w\" (UniqueName: \"kubernetes.io/projected/71a94cd2-8bcf-4026-8668-1c55d6e1a8a7-kube-api-access-nbv7w\") pod \"keystone-dc06-account-create-ls6hh\" (UID: \"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7\") " pod="openstack/keystone-dc06-account-create-ls6hh" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.428772 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-66ef-account-create-tv2kg"] Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.430400 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-66ef-account-create-tv2kg" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.433898 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.443428 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-66ef-account-create-tv2kg"] Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.464956 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-dc06-account-create-ls6hh" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.518630 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb2v6\" (UniqueName: \"kubernetes.io/projected/191228de-91b1-48d9-a5ac-48846dddf3ed-kube-api-access-kb2v6\") pod \"placement-66ef-account-create-tv2kg\" (UID: \"191228de-91b1-48d9-a5ac-48846dddf3ed\") " pod="openstack/placement-66ef-account-create-tv2kg" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.620366 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb2v6\" (UniqueName: \"kubernetes.io/projected/191228de-91b1-48d9-a5ac-48846dddf3ed-kube-api-access-kb2v6\") pod \"placement-66ef-account-create-tv2kg\" (UID: \"191228de-91b1-48d9-a5ac-48846dddf3ed\") " pod="openstack/placement-66ef-account-create-tv2kg" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.636967 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb2v6\" (UniqueName: \"kubernetes.io/projected/191228de-91b1-48d9-a5ac-48846dddf3ed-kube-api-access-kb2v6\") pod \"placement-66ef-account-create-tv2kg\" (UID: \"191228de-91b1-48d9-a5ac-48846dddf3ed\") " pod="openstack/placement-66ef-account-create-tv2kg" Sep 29 22:45:47 crc kubenswrapper[4922]: I0929 22:45:47.753983 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-66ef-account-create-tv2kg" Sep 29 22:45:48 crc kubenswrapper[4922]: W0929 22:45:48.107891 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod191228de_91b1_48d9_a5ac_48846dddf3ed.slice/crio-375eba71476231652195cbf6490693d29768caf307cc54ab44148212a89bda1e WatchSource:0}: Error finding container 375eba71476231652195cbf6490693d29768caf307cc54ab44148212a89bda1e: Status 404 returned error can't find the container with id 375eba71476231652195cbf6490693d29768caf307cc54ab44148212a89bda1e Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.110529 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-66ef-account-create-tv2kg"] Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.133773 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:48 crc kubenswrapper[4922]: E0929 22:45:48.134056 4922 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 22:45:48 crc kubenswrapper[4922]: E0929 22:45:48.134074 4922 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 22:45:48 crc kubenswrapper[4922]: E0929 22:45:48.134124 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift podName:e0b8c219-afd8-41e5-a9d7-686c7b70fd70 nodeName:}" failed. No retries permitted until 2025-09-29 22:45:56.134106632 +0000 UTC m=+1160.444395465 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift") pod "swift-storage-0" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70") : configmap "swift-ring-files" not found Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.355918 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9lrwb" event={"ID":"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0","Type":"ContainerStarted","Data":"15442b286cfb918d0b410822fb8fe774891b5bb95ac0d45341c5a3d2baaa7d1c"} Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.357486 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-66ef-account-create-tv2kg" event={"ID":"191228de-91b1-48d9-a5ac-48846dddf3ed","Type":"ContainerStarted","Data":"08bf9bfd078c072ef33d724c9ee636df566a2fd59a2f592dc6c73284ee0025ac"} Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.357534 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-66ef-account-create-tv2kg" event={"ID":"191228de-91b1-48d9-a5ac-48846dddf3ed","Type":"ContainerStarted","Data":"375eba71476231652195cbf6490693d29768caf307cc54ab44148212a89bda1e"} Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.399469 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-9lrwb" podStartSLOduration=1.539539577 podStartE2EDuration="4.399444642s" podCreationTimestamp="2025-09-29 22:45:44 +0000 UTC" firstStartedPulling="2025-09-29 22:45:45.017540806 +0000 UTC m=+1149.327829619" lastFinishedPulling="2025-09-29 22:45:47.877445861 +0000 UTC m=+1152.187734684" observedRunningTime="2025-09-29 22:45:48.38389347 +0000 UTC m=+1152.694182293" watchObservedRunningTime="2025-09-29 22:45:48.399444642 +0000 UTC m=+1152.709733495" Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.419991 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-dc06-account-create-ls6hh"] Sep 29 22:45:48 crc kubenswrapper[4922]: I0929 22:45:48.420401 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-66ef-account-create-tv2kg" podStartSLOduration=1.42037834 podStartE2EDuration="1.42037834s" podCreationTimestamp="2025-09-29 22:45:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:45:48.417768564 +0000 UTC m=+1152.728057407" watchObservedRunningTime="2025-09-29 22:45:48.42037834 +0000 UTC m=+1152.730667163" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.371069 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc06-account-create-ls6hh" event={"ID":"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7","Type":"ContainerStarted","Data":"4e669d9dab7924ffde5ba905dfed402d643a8bfbec30bede0e88432df10b3f2b"} Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.371125 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc06-account-create-ls6hh" event={"ID":"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7","Type":"ContainerStarted","Data":"897f399db2117a28ff17086ad3a42422dc0a9004c2052104df75fe279c701b21"} Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.465658 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.551964 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-8554648995-z4cb8"] Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.552272 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-z4cb8" podUID="c1063916-4458-4799-aa02-e4b53b5838e6" containerName="dnsmasq-dns" containerID="cri-o://86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b" gracePeriod=10 Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.693346 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.705507 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.899679 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-nrr6k-config-c54ts"] Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.901130 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.903813 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.919475 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nrr6k-config-c54ts"] Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.986925 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-scripts\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.986979 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-additional-scripts\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.987031 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run-ovn\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.987154 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz9vg\" (UniqueName: \"kubernetes.io/projected/d1c4297d-d04d-4da5-8929-25580ef2c56b-kube-api-access-wz9vg\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.987245 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 
22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.987301 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-log-ovn\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:49 crc kubenswrapper[4922]: I0929 22:45:49.987998 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-nrr6k" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" probeResult="failure" output=< Sep 29 22:45:49 crc kubenswrapper[4922]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 22:45:49 crc kubenswrapper[4922]: > Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.088686 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-scripts\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.088774 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-additional-scripts\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.088839 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run-ovn\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.088885 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz9vg\" (UniqueName: \"kubernetes.io/projected/d1c4297d-d04d-4da5-8929-25580ef2c56b-kube-api-access-wz9vg\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.088934 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.088961 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-log-ovn\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.089397 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run-ovn\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " 
pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.089489 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.090056 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-additional-scripts\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.090141 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-log-ovn\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.092310 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-scripts\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.108794 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz9vg\" (UniqueName: \"kubernetes.io/projected/d1c4297d-d04d-4da5-8929-25580ef2c56b-kube-api-access-wz9vg\") pod \"ovn-controller-nrr6k-config-c54ts\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.215976 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.314043 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.379871 4922 generic.go:334] "Generic (PLEG): container finished" podID="c1063916-4458-4799-aa02-e4b53b5838e6" containerID="86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b" exitCode=0 Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.379930 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-z4cb8" event={"ID":"c1063916-4458-4799-aa02-e4b53b5838e6","Type":"ContainerDied","Data":"86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b"} Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.379954 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-z4cb8" event={"ID":"c1063916-4458-4799-aa02-e4b53b5838e6","Type":"ContainerDied","Data":"e888a9b6e466c357e7366ec03f32baee2839a491c93b3e264b0fca589963c900"} Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.379954 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-z4cb8" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.379970 4922 scope.go:117] "RemoveContainer" containerID="86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.386855 4922 generic.go:334] "Generic (PLEG): container finished" podID="71a94cd2-8bcf-4026-8668-1c55d6e1a8a7" containerID="4e669d9dab7924ffde5ba905dfed402d643a8bfbec30bede0e88432df10b3f2b" exitCode=0 Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.386921 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc06-account-create-ls6hh" event={"ID":"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7","Type":"ContainerDied","Data":"4e669d9dab7924ffde5ba905dfed402d643a8bfbec30bede0e88432df10b3f2b"} Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.388672 4922 generic.go:334] "Generic (PLEG): container finished" podID="191228de-91b1-48d9-a5ac-48846dddf3ed" containerID="08bf9bfd078c072ef33d724c9ee636df566a2fd59a2f592dc6c73284ee0025ac" exitCode=0 Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.388727 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-66ef-account-create-tv2kg" event={"ID":"191228de-91b1-48d9-a5ac-48846dddf3ed","Type":"ContainerDied","Data":"08bf9bfd078c072ef33d724c9ee636df566a2fd59a2f592dc6c73284ee0025ac"} Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.392352 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-sb\") pod \"c1063916-4458-4799-aa02-e4b53b5838e6\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.392406 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-dns-svc\") pod \"c1063916-4458-4799-aa02-e4b53b5838e6\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.392432 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-config\") pod \"c1063916-4458-4799-aa02-e4b53b5838e6\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.392449 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-nb\") pod \"c1063916-4458-4799-aa02-e4b53b5838e6\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.392601 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqvvm\" (UniqueName: \"kubernetes.io/projected/c1063916-4458-4799-aa02-e4b53b5838e6-kube-api-access-kqvvm\") pod \"c1063916-4458-4799-aa02-e4b53b5838e6\" (UID: \"c1063916-4458-4799-aa02-e4b53b5838e6\") " Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.402902 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1063916-4458-4799-aa02-e4b53b5838e6-kube-api-access-kqvvm" (OuterVolumeSpecName: "kube-api-access-kqvvm") pod "c1063916-4458-4799-aa02-e4b53b5838e6" (UID: "c1063916-4458-4799-aa02-e4b53b5838e6"). 
InnerVolumeSpecName "kube-api-access-kqvvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.403237 4922 scope.go:117] "RemoveContainer" containerID="2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.452927 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c1063916-4458-4799-aa02-e4b53b5838e6" (UID: "c1063916-4458-4799-aa02-e4b53b5838e6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.456851 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-config" (OuterVolumeSpecName: "config") pod "c1063916-4458-4799-aa02-e4b53b5838e6" (UID: "c1063916-4458-4799-aa02-e4b53b5838e6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.466969 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c1063916-4458-4799-aa02-e4b53b5838e6" (UID: "c1063916-4458-4799-aa02-e4b53b5838e6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.473760 4922 scope.go:117] "RemoveContainer" containerID="86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b" Sep 29 22:45:50 crc kubenswrapper[4922]: E0929 22:45:50.474185 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b\": container with ID starting with 86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b not found: ID does not exist" containerID="86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.474218 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b"} err="failed to get container status \"86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b\": rpc error: code = NotFound desc = could not find container \"86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b\": container with ID starting with 86e1a83f2d360d401322271806ee8869e92bc1e5769eba4475d3d1ac476c242b not found: ID does not exist" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.474239 4922 scope.go:117] "RemoveContainer" containerID="2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c" Sep 29 22:45:50 crc kubenswrapper[4922]: E0929 22:45:50.474593 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c\": container with ID starting with 2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c not found: ID does not exist" containerID="2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.474648 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c"} err="failed to get container status \"2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c\": rpc error: code = NotFound desc = could not find container \"2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c\": container with ID starting with 2a560a70b730299d4c2eab86b4a807d65a4e35a2336a51a6ef6c0376e357099c not found: ID does not exist" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.476871 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c1063916-4458-4799-aa02-e4b53b5838e6" (UID: "c1063916-4458-4799-aa02-e4b53b5838e6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.494623 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqvvm\" (UniqueName: \"kubernetes.io/projected/c1063916-4458-4799-aa02-e4b53b5838e6-kube-api-access-kqvvm\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.494644 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.494652 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.494662 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.494671 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1063916-4458-4799-aa02-e4b53b5838e6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.714319 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-nrr6k-config-c54ts"] Sep 29 22:45:50 crc kubenswrapper[4922]: W0929 22:45:50.728719 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1c4297d_d04d_4da5_8929_25580ef2c56b.slice/crio-d6661b1d4ec1318e2a17cb27dbc2127422c9becdf6297765fda3f4150f9568ba WatchSource:0}: Error finding container d6661b1d4ec1318e2a17cb27dbc2127422c9becdf6297765fda3f4150f9568ba: Status 404 returned error can't find the container with id d6661b1d4ec1318e2a17cb27dbc2127422c9becdf6297765fda3f4150f9568ba Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.878943 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-z4cb8"] Sep 29 22:45:50 crc kubenswrapper[4922]: I0929 22:45:50.887001 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-z4cb8"] Sep 29 22:45:51 crc kubenswrapper[4922]: I0929 22:45:51.398760 4922 generic.go:334] "Generic (PLEG): container finished" podID="d1c4297d-d04d-4da5-8929-25580ef2c56b" containerID="a8fb0d82dbf0af1564f72619d645eb92b967e7d87ae89691bd4dfff51a6e825b" 
exitCode=0 Sep 29 22:45:51 crc kubenswrapper[4922]: I0929 22:45:51.399679 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nrr6k-config-c54ts" event={"ID":"d1c4297d-d04d-4da5-8929-25580ef2c56b","Type":"ContainerDied","Data":"a8fb0d82dbf0af1564f72619d645eb92b967e7d87ae89691bd4dfff51a6e825b"} Sep 29 22:45:51 crc kubenswrapper[4922]: I0929 22:45:51.399721 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nrr6k-config-c54ts" event={"ID":"d1c4297d-d04d-4da5-8929-25580ef2c56b","Type":"ContainerStarted","Data":"d6661b1d4ec1318e2a17cb27dbc2127422c9becdf6297765fda3f4150f9568ba"} Sep 29 22:45:51 crc kubenswrapper[4922]: I0929 22:45:51.903187 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-66ef-account-create-tv2kg" Sep 29 22:45:51 crc kubenswrapper[4922]: I0929 22:45:51.918603 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-dc06-account-create-ls6hh" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.025157 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbv7w\" (UniqueName: \"kubernetes.io/projected/71a94cd2-8bcf-4026-8668-1c55d6e1a8a7-kube-api-access-nbv7w\") pod \"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7\" (UID: \"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7\") " Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.025472 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb2v6\" (UniqueName: \"kubernetes.io/projected/191228de-91b1-48d9-a5ac-48846dddf3ed-kube-api-access-kb2v6\") pod \"191228de-91b1-48d9-a5ac-48846dddf3ed\" (UID: \"191228de-91b1-48d9-a5ac-48846dddf3ed\") " Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.034929 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71a94cd2-8bcf-4026-8668-1c55d6e1a8a7-kube-api-access-nbv7w" (OuterVolumeSpecName: "kube-api-access-nbv7w") pod "71a94cd2-8bcf-4026-8668-1c55d6e1a8a7" (UID: "71a94cd2-8bcf-4026-8668-1c55d6e1a8a7"). InnerVolumeSpecName "kube-api-access-nbv7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.035155 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/191228de-91b1-48d9-a5ac-48846dddf3ed-kube-api-access-kb2v6" (OuterVolumeSpecName: "kube-api-access-kb2v6") pod "191228de-91b1-48d9-a5ac-48846dddf3ed" (UID: "191228de-91b1-48d9-a5ac-48846dddf3ed"). InnerVolumeSpecName "kube-api-access-kb2v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.127528 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbv7w\" (UniqueName: \"kubernetes.io/projected/71a94cd2-8bcf-4026-8668-1c55d6e1a8a7-kube-api-access-nbv7w\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.127572 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb2v6\" (UniqueName: \"kubernetes.io/projected/191228de-91b1-48d9-a5ac-48846dddf3ed-kube-api-access-kb2v6\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.409872 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-66ef-account-create-tv2kg" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.409995 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-66ef-account-create-tv2kg" event={"ID":"191228de-91b1-48d9-a5ac-48846dddf3ed","Type":"ContainerDied","Data":"375eba71476231652195cbf6490693d29768caf307cc54ab44148212a89bda1e"} Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.410051 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="375eba71476231652195cbf6490693d29768caf307cc54ab44148212a89bda1e" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.412391 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-dc06-account-create-ls6hh" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.412425 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-dc06-account-create-ls6hh" event={"ID":"71a94cd2-8bcf-4026-8668-1c55d6e1a8a7","Type":"ContainerDied","Data":"897f399db2117a28ff17086ad3a42422dc0a9004c2052104df75fe279c701b21"} Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.412492 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="897f399db2117a28ff17086ad3a42422dc0a9004c2052104df75fe279c701b21" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.442658 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1063916-4458-4799-aa02-e4b53b5838e6" path="/var/lib/kubelet/pods/c1063916-4458-4799-aa02-e4b53b5838e6/volumes" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.874027 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.876367 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-185c-account-create-6bgh5"] Sep 29 22:45:52 crc kubenswrapper[4922]: E0929 22:45:52.876794 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="191228de-91b1-48d9-a5ac-48846dddf3ed" containerName="mariadb-account-create" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.876814 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="191228de-91b1-48d9-a5ac-48846dddf3ed" containerName="mariadb-account-create" Sep 29 22:45:52 crc kubenswrapper[4922]: E0929 22:45:52.876833 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71a94cd2-8bcf-4026-8668-1c55d6e1a8a7" containerName="mariadb-account-create" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.876841 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="71a94cd2-8bcf-4026-8668-1c55d6e1a8a7" containerName="mariadb-account-create" Sep 29 22:45:52 crc kubenswrapper[4922]: E0929 22:45:52.876861 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1063916-4458-4799-aa02-e4b53b5838e6" containerName="init" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.876868 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1063916-4458-4799-aa02-e4b53b5838e6" containerName="init" Sep 29 22:45:52 crc kubenswrapper[4922]: E0929 22:45:52.876879 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1063916-4458-4799-aa02-e4b53b5838e6" containerName="dnsmasq-dns" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.876888 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1063916-4458-4799-aa02-e4b53b5838e6" containerName="dnsmasq-dns" Sep 29 
22:45:52 crc kubenswrapper[4922]: E0929 22:45:52.876904 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1c4297d-d04d-4da5-8929-25580ef2c56b" containerName="ovn-config" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.876911 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1c4297d-d04d-4da5-8929-25580ef2c56b" containerName="ovn-config" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.878288 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="191228de-91b1-48d9-a5ac-48846dddf3ed" containerName="mariadb-account-create" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.878316 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1063916-4458-4799-aa02-e4b53b5838e6" containerName="dnsmasq-dns" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.878332 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="71a94cd2-8bcf-4026-8668-1c55d6e1a8a7" containerName="mariadb-account-create" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.878349 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1c4297d-d04d-4da5-8929-25580ef2c56b" containerName="ovn-config" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.878977 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-185c-account-create-6bgh5" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.883120 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 29 22:45:52 crc kubenswrapper[4922]: I0929 22:45:52.884169 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-185c-account-create-6bgh5"] Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.065775 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-additional-scripts\") pod \"d1c4297d-d04d-4da5-8929-25580ef2c56b\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066135 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run-ovn\") pod \"d1c4297d-d04d-4da5-8929-25580ef2c56b\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066191 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-scripts\") pod \"d1c4297d-d04d-4da5-8929-25580ef2c56b\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066221 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run\") pod \"d1c4297d-d04d-4da5-8929-25580ef2c56b\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066272 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz9vg\" (UniqueName: \"kubernetes.io/projected/d1c4297d-d04d-4da5-8929-25580ef2c56b-kube-api-access-wz9vg\") pod \"d1c4297d-d04d-4da5-8929-25580ef2c56b\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066296 4922 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-log-ovn\") pod \"d1c4297d-d04d-4da5-8929-25580ef2c56b\" (UID: \"d1c4297d-d04d-4da5-8929-25580ef2c56b\") " Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066288 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d1c4297d-d04d-4da5-8929-25580ef2c56b" (UID: "d1c4297d-d04d-4da5-8929-25580ef2c56b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066367 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run" (OuterVolumeSpecName: "var-run") pod "d1c4297d-d04d-4da5-8929-25580ef2c56b" (UID: "d1c4297d-d04d-4da5-8929-25580ef2c56b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066515 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d1c4297d-d04d-4da5-8929-25580ef2c56b" (UID: "d1c4297d-d04d-4da5-8929-25580ef2c56b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066550 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whm57\" (UniqueName: \"kubernetes.io/projected/e3d2683e-4488-4dc2-8362-75b9068ce7e6-kube-api-access-whm57\") pod \"glance-185c-account-create-6bgh5\" (UID: \"e3d2683e-4488-4dc2-8362-75b9068ce7e6\") " pod="openstack/glance-185c-account-create-6bgh5" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066938 4922 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066969 4922 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.066979 4922 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d1c4297d-d04d-4da5-8929-25580ef2c56b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.067680 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d1c4297d-d04d-4da5-8929-25580ef2c56b" (UID: "d1c4297d-d04d-4da5-8929-25580ef2c56b"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.067803 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-scripts" (OuterVolumeSpecName: "scripts") pod "d1c4297d-d04d-4da5-8929-25580ef2c56b" (UID: "d1c4297d-d04d-4da5-8929-25580ef2c56b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.092897 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1c4297d-d04d-4da5-8929-25580ef2c56b-kube-api-access-wz9vg" (OuterVolumeSpecName: "kube-api-access-wz9vg") pod "d1c4297d-d04d-4da5-8929-25580ef2c56b" (UID: "d1c4297d-d04d-4da5-8929-25580ef2c56b"). InnerVolumeSpecName "kube-api-access-wz9vg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.169018 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whm57\" (UniqueName: \"kubernetes.io/projected/e3d2683e-4488-4dc2-8362-75b9068ce7e6-kube-api-access-whm57\") pod \"glance-185c-account-create-6bgh5\" (UID: \"e3d2683e-4488-4dc2-8362-75b9068ce7e6\") " pod="openstack/glance-185c-account-create-6bgh5" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.169232 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.169252 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz9vg\" (UniqueName: \"kubernetes.io/projected/d1c4297d-d04d-4da5-8929-25580ef2c56b-kube-api-access-wz9vg\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.169268 4922 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d1c4297d-d04d-4da5-8929-25580ef2c56b-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.207783 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whm57\" (UniqueName: \"kubernetes.io/projected/e3d2683e-4488-4dc2-8362-75b9068ce7e6-kube-api-access-whm57\") pod \"glance-185c-account-create-6bgh5\" (UID: \"e3d2683e-4488-4dc2-8362-75b9068ce7e6\") " pod="openstack/glance-185c-account-create-6bgh5" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.208732 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-185c-account-create-6bgh5" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.423450 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nrr6k-config-c54ts" event={"ID":"d1c4297d-d04d-4da5-8929-25580ef2c56b","Type":"ContainerDied","Data":"d6661b1d4ec1318e2a17cb27dbc2127422c9becdf6297765fda3f4150f9568ba"} Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.423496 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6661b1d4ec1318e2a17cb27dbc2127422c9becdf6297765fda3f4150f9568ba" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.423589 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nrr6k-config-c54ts" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.498075 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.715833 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-7pc4b"] Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.717191 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7pc4b" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.727188 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7pc4b"] Sep 29 22:45:53 crc kubenswrapper[4922]: W0929 22:45:53.762540 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3d2683e_4488_4dc2_8362_75b9068ce7e6.slice/crio-7bc8de415afe7ecc812a3f2e494845641956df42c64fa549b34ac67b02aac3f5 WatchSource:0}: Error finding container 7bc8de415afe7ecc812a3f2e494845641956df42c64fa549b34ac67b02aac3f5: Status 404 returned error can't find the container with id 7bc8de415afe7ecc812a3f2e494845641956df42c64fa549b34ac67b02aac3f5 Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.766905 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-185c-account-create-6bgh5"] Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.885312 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9n27z\" (UniqueName: \"kubernetes.io/projected/cb59351d-3f5c-457d-a010-a5f48104cd03-kube-api-access-9n27z\") pod \"barbican-db-create-7pc4b\" (UID: \"cb59351d-3f5c-457d-a010-a5f48104cd03\") " pod="openstack/barbican-db-create-7pc4b" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.928376 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-5d7mn"] Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.929542 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-5d7mn" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.952009 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-5d7mn"] Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.988350 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9n27z\" (UniqueName: \"kubernetes.io/projected/cb59351d-3f5c-457d-a010-a5f48104cd03-kube-api-access-9n27z\") pod \"barbican-db-create-7pc4b\" (UID: \"cb59351d-3f5c-457d-a010-a5f48104cd03\") " pod="openstack/barbican-db-create-7pc4b" Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.991530 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-nrr6k-config-c54ts"] Sep 29 22:45:53 crc kubenswrapper[4922]: I0929 22:45:53.996123 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-nrr6k-config-c54ts"] Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.011765 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9n27z\" (UniqueName: \"kubernetes.io/projected/cb59351d-3f5c-457d-a010-a5f48104cd03-kube-api-access-9n27z\") pod \"barbican-db-create-7pc4b\" (UID: \"cb59351d-3f5c-457d-a010-a5f48104cd03\") " pod="openstack/barbican-db-create-7pc4b" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.017024 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-r5zbt"] Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.019679 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-r5zbt" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.031473 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-r5zbt"] Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.040207 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-7pc4b" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.097106 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rk6l\" (UniqueName: \"kubernetes.io/projected/9dc3f0ae-3193-44e1-9bad-edbbd00a94ea-kube-api-access-5rk6l\") pod \"cinder-db-create-5d7mn\" (UID: \"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea\") " pod="openstack/cinder-db-create-5d7mn" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.199724 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j2r8\" (UniqueName: \"kubernetes.io/projected/e02eaa01-5408-4268-b8ac-d7bd7917d1c6-kube-api-access-6j2r8\") pod \"neutron-db-create-r5zbt\" (UID: \"e02eaa01-5408-4268-b8ac-d7bd7917d1c6\") " pod="openstack/neutron-db-create-r5zbt" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.199885 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rk6l\" (UniqueName: \"kubernetes.io/projected/9dc3f0ae-3193-44e1-9bad-edbbd00a94ea-kube-api-access-5rk6l\") pod \"cinder-db-create-5d7mn\" (UID: \"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea\") " pod="openstack/cinder-db-create-5d7mn" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.212245 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.220167 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rk6l\" (UniqueName: \"kubernetes.io/projected/9dc3f0ae-3193-44e1-9bad-edbbd00a94ea-kube-api-access-5rk6l\") pod \"cinder-db-create-5d7mn\" (UID: \"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea\") " pod="openstack/cinder-db-create-5d7mn" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.275860 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-5d7mn" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.304239 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j2r8\" (UniqueName: \"kubernetes.io/projected/e02eaa01-5408-4268-b8ac-d7bd7917d1c6-kube-api-access-6j2r8\") pod \"neutron-db-create-r5zbt\" (UID: \"e02eaa01-5408-4268-b8ac-d7bd7917d1c6\") " pod="openstack/neutron-db-create-r5zbt" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.319057 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j2r8\" (UniqueName: \"kubernetes.io/projected/e02eaa01-5408-4268-b8ac-d7bd7917d1c6-kube-api-access-6j2r8\") pod \"neutron-db-create-r5zbt\" (UID: \"e02eaa01-5408-4268-b8ac-d7bd7917d1c6\") " pod="openstack/neutron-db-create-r5zbt" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.433955 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1c4297d-d04d-4da5-8929-25580ef2c56b" path="/var/lib/kubelet/pods/d1c4297d-d04d-4da5-8929-25580ef2c56b/volumes" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.436198 4922 generic.go:334] "Generic (PLEG): container finished" podID="e3d2683e-4488-4dc2-8362-75b9068ce7e6" containerID="9c6ded9f84dde5456d83aa657b28ed7e35c6018741c5d3cd569e5fede1321c6c" exitCode=0 Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.436224 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-185c-account-create-6bgh5" event={"ID":"e3d2683e-4488-4dc2-8362-75b9068ce7e6","Type":"ContainerDied","Data":"9c6ded9f84dde5456d83aa657b28ed7e35c6018741c5d3cd569e5fede1321c6c"} Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.436243 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-185c-account-create-6bgh5" event={"ID":"e3d2683e-4488-4dc2-8362-75b9068ce7e6","Type":"ContainerStarted","Data":"7bc8de415afe7ecc812a3f2e494845641956df42c64fa549b34ac67b02aac3f5"} Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.441231 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-r5zbt" Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.458544 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7pc4b"] Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.505632 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-5d7mn"] Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.692713 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-r5zbt"] Sep 29 22:45:54 crc kubenswrapper[4922]: W0929 22:45:54.732317 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode02eaa01_5408_4268_b8ac_d7bd7917d1c6.slice/crio-c0b64151d40a68d53771636f1b2c51ae146358df2c7b2286675028dfc48ff696 WatchSource:0}: Error finding container c0b64151d40a68d53771636f1b2c51ae146358df2c7b2286675028dfc48ff696: Status 404 returned error can't find the container with id c0b64151d40a68d53771636f1b2c51ae146358df2c7b2286675028dfc48ff696 Sep 29 22:45:54 crc kubenswrapper[4922]: I0929 22:45:54.969182 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-nrr6k" Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.445814 4922 generic.go:334] "Generic (PLEG): container finished" podID="e02eaa01-5408-4268-b8ac-d7bd7917d1c6" containerID="09ecdc04d13e062d78a3247657034bae626cf7a3bb7d11f959a60cb773312999" exitCode=0 Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.445878 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-r5zbt" event={"ID":"e02eaa01-5408-4268-b8ac-d7bd7917d1c6","Type":"ContainerDied","Data":"09ecdc04d13e062d78a3247657034bae626cf7a3bb7d11f959a60cb773312999"} Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.446172 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-r5zbt" event={"ID":"e02eaa01-5408-4268-b8ac-d7bd7917d1c6","Type":"ContainerStarted","Data":"c0b64151d40a68d53771636f1b2c51ae146358df2c7b2286675028dfc48ff696"} Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.447944 4922 generic.go:334] "Generic (PLEG): container finished" podID="90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" containerID="15442b286cfb918d0b410822fb8fe774891b5bb95ac0d45341c5a3d2baaa7d1c" exitCode=0 Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.448024 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9lrwb" event={"ID":"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0","Type":"ContainerDied","Data":"15442b286cfb918d0b410822fb8fe774891b5bb95ac0d45341c5a3d2baaa7d1c"} Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.449767 4922 generic.go:334] "Generic (PLEG): container finished" podID="cb59351d-3f5c-457d-a010-a5f48104cd03" containerID="b7791e3489be4c7ce6ecd6c1df17b8120899bfaa7aeb1826665536cc3f326419" exitCode=0 Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.449873 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7pc4b" event={"ID":"cb59351d-3f5c-457d-a010-a5f48104cd03","Type":"ContainerDied","Data":"b7791e3489be4c7ce6ecd6c1df17b8120899bfaa7aeb1826665536cc3f326419"} Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.449906 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7pc4b" 
event={"ID":"cb59351d-3f5c-457d-a010-a5f48104cd03","Type":"ContainerStarted","Data":"38a9eb26b13d11a51eed432f8835273d5fadd0a6594900bb674e2832b473cbbe"} Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.452351 4922 generic.go:334] "Generic (PLEG): container finished" podID="9dc3f0ae-3193-44e1-9bad-edbbd00a94ea" containerID="c372429d359f29b5fb62f37582e449b4609c3a2ef89b7a71ebeb759be5b01361" exitCode=0 Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.452484 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-5d7mn" event={"ID":"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea","Type":"ContainerDied","Data":"c372429d359f29b5fb62f37582e449b4609c3a2ef89b7a71ebeb759be5b01361"} Sep 29 22:45:55 crc kubenswrapper[4922]: I0929 22:45:55.452580 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-5d7mn" event={"ID":"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea","Type":"ContainerStarted","Data":"39f64f9fda8ace4d7268e255de3c32165b52ccc48ab1144f5c8f74964dee0186"} Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:55.800324 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-185c-account-create-6bgh5" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:55.936192 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whm57\" (UniqueName: \"kubernetes.io/projected/e3d2683e-4488-4dc2-8362-75b9068ce7e6-kube-api-access-whm57\") pod \"e3d2683e-4488-4dc2-8362-75b9068ce7e6\" (UID: \"e3d2683e-4488-4dc2-8362-75b9068ce7e6\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:55.952576 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3d2683e-4488-4dc2-8362-75b9068ce7e6-kube-api-access-whm57" (OuterVolumeSpecName: "kube-api-access-whm57") pod "e3d2683e-4488-4dc2-8362-75b9068ce7e6" (UID: "e3d2683e-4488-4dc2-8362-75b9068ce7e6"). InnerVolumeSpecName "kube-api-access-whm57". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.038128 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whm57\" (UniqueName: \"kubernetes.io/projected/e3d2683e-4488-4dc2-8362-75b9068ce7e6-kube-api-access-whm57\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.139634 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.159220 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"swift-storage-0\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " pod="openstack/swift-storage-0" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.233751 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.462520 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-185c-account-create-6bgh5" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.463210 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-185c-account-create-6bgh5" event={"ID":"e3d2683e-4488-4dc2-8362-75b9068ce7e6","Type":"ContainerDied","Data":"7bc8de415afe7ecc812a3f2e494845641956df42c64fa549b34ac67b02aac3f5"} Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.463237 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bc8de415afe7ecc812a3f2e494845641956df42c64fa549b34ac67b02aac3f5" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.591655 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 22:45:56 crc kubenswrapper[4922]: W0929 22:45:56.598592 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0b8c219_afd8_41e5_a9d7_686c7b70fd70.slice/crio-363e32a531e3f6bd3048514b38ed84553dc6087d4cb48d0b444b45d8e462c56f WatchSource:0}: Error finding container 363e32a531e3f6bd3048514b38ed84553dc6087d4cb48d0b444b45d8e462c56f: Status 404 returned error can't find the container with id 363e32a531e3f6bd3048514b38ed84553dc6087d4cb48d0b444b45d8e462c56f Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.791868 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.902434 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7pc4b" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.923712 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-r5zbt" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.945789 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-5d7mn" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.953028 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-dispersionconf\") pod \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.953107 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-scripts\") pod \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.953165 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-combined-ca-bundle\") pod \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.953216 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lxpr\" (UniqueName: \"kubernetes.io/projected/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-kube-api-access-2lxpr\") pod \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.953267 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-etc-swift\") pod \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.953339 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-swiftconf\") pod \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.953367 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-ring-data-devices\") pod \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\" (UID: \"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0\") " Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.954358 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" (UID: "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.954413 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" (UID: "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.960846 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-kube-api-access-2lxpr" (OuterVolumeSpecName: "kube-api-access-2lxpr") pod "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" (UID: "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0"). InnerVolumeSpecName "kube-api-access-2lxpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.974456 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" (UID: "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.977545 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" (UID: "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.979958 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" (UID: "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:45:56 crc kubenswrapper[4922]: I0929 22:45:56.983090 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-scripts" (OuterVolumeSpecName: "scripts") pod "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" (UID: "90cbdc4e-53d3-4732-9239-6e2a46dcd4b0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.056894 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j2r8\" (UniqueName: \"kubernetes.io/projected/e02eaa01-5408-4268-b8ac-d7bd7917d1c6-kube-api-access-6j2r8\") pod \"e02eaa01-5408-4268-b8ac-d7bd7917d1c6\" (UID: \"e02eaa01-5408-4268-b8ac-d7bd7917d1c6\") " Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.057290 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rk6l\" (UniqueName: \"kubernetes.io/projected/9dc3f0ae-3193-44e1-9bad-edbbd00a94ea-kube-api-access-5rk6l\") pod \"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea\" (UID: \"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea\") " Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.057446 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9n27z\" (UniqueName: \"kubernetes.io/projected/cb59351d-3f5c-457d-a010-a5f48104cd03-kube-api-access-9n27z\") pod \"cb59351d-3f5c-457d-a010-a5f48104cd03\" (UID: \"cb59351d-3f5c-457d-a010-a5f48104cd03\") " Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.057905 4922 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.058003 4922 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.058085 4922 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.058179 4922 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.058271 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.058370 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.058521 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lxpr\" (UniqueName: \"kubernetes.io/projected/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0-kube-api-access-2lxpr\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.061093 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb59351d-3f5c-457d-a010-a5f48104cd03-kube-api-access-9n27z" (OuterVolumeSpecName: "kube-api-access-9n27z") pod "cb59351d-3f5c-457d-a010-a5f48104cd03" (UID: "cb59351d-3f5c-457d-a010-a5f48104cd03"). InnerVolumeSpecName "kube-api-access-9n27z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.062129 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e02eaa01-5408-4268-b8ac-d7bd7917d1c6-kube-api-access-6j2r8" (OuterVolumeSpecName: "kube-api-access-6j2r8") pod "e02eaa01-5408-4268-b8ac-d7bd7917d1c6" (UID: "e02eaa01-5408-4268-b8ac-d7bd7917d1c6"). InnerVolumeSpecName "kube-api-access-6j2r8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.062895 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc3f0ae-3193-44e1-9bad-edbbd00a94ea-kube-api-access-5rk6l" (OuterVolumeSpecName: "kube-api-access-5rk6l") pod "9dc3f0ae-3193-44e1-9bad-edbbd00a94ea" (UID: "9dc3f0ae-3193-44e1-9bad-edbbd00a94ea"). InnerVolumeSpecName "kube-api-access-5rk6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.160524 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rk6l\" (UniqueName: \"kubernetes.io/projected/9dc3f0ae-3193-44e1-9bad-edbbd00a94ea-kube-api-access-5rk6l\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.160588 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9n27z\" (UniqueName: \"kubernetes.io/projected/cb59351d-3f5c-457d-a010-a5f48104cd03-kube-api-access-9n27z\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.160604 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j2r8\" (UniqueName: \"kubernetes.io/projected/e02eaa01-5408-4268-b8ac-d7bd7917d1c6-kube-api-access-6j2r8\") on node \"crc\" DevicePath \"\"" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.472742 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-5d7mn" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.472733 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-5d7mn" event={"ID":"9dc3f0ae-3193-44e1-9bad-edbbd00a94ea","Type":"ContainerDied","Data":"39f64f9fda8ace4d7268e255de3c32165b52ccc48ab1144f5c8f74964dee0186"} Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.472818 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39f64f9fda8ace4d7268e255de3c32165b52ccc48ab1144f5c8f74964dee0186" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.485307 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"363e32a531e3f6bd3048514b38ed84553dc6087d4cb48d0b444b45d8e462c56f"} Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.490172 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-r5zbt" event={"ID":"e02eaa01-5408-4268-b8ac-d7bd7917d1c6","Type":"ContainerDied","Data":"c0b64151d40a68d53771636f1b2c51ae146358df2c7b2286675028dfc48ff696"} Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.490208 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0b64151d40a68d53771636f1b2c51ae146358df2c7b2286675028dfc48ff696" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.490274 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-r5zbt" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.495642 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9lrwb" event={"ID":"90cbdc4e-53d3-4732-9239-6e2a46dcd4b0","Type":"ContainerDied","Data":"6429b43056a992a2cffdcd9eddb0cb44a87d6e9b791d24d49a0071a169fc6050"} Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.495692 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6429b43056a992a2cffdcd9eddb0cb44a87d6e9b791d24d49a0071a169fc6050" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.495758 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9lrwb" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.503453 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7pc4b" event={"ID":"cb59351d-3f5c-457d-a010-a5f48104cd03","Type":"ContainerDied","Data":"38a9eb26b13d11a51eed432f8835273d5fadd0a6594900bb674e2832b473cbbe"} Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.503701 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38a9eb26b13d11a51eed432f8835273d5fadd0a6594900bb674e2832b473cbbe" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.503818 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7pc4b" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.711893 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-fktvf"] Sep 29 22:45:57 crc kubenswrapper[4922]: E0929 22:45:57.712183 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dc3f0ae-3193-44e1-9bad-edbbd00a94ea" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712199 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dc3f0ae-3193-44e1-9bad-edbbd00a94ea" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: E0929 22:45:57.712213 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e02eaa01-5408-4268-b8ac-d7bd7917d1c6" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712220 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e02eaa01-5408-4268-b8ac-d7bd7917d1c6" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: E0929 22:45:57.712230 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3d2683e-4488-4dc2-8362-75b9068ce7e6" containerName="mariadb-account-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712237 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3d2683e-4488-4dc2-8362-75b9068ce7e6" containerName="mariadb-account-create" Sep 29 22:45:57 crc kubenswrapper[4922]: E0929 22:45:57.712251 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" containerName="swift-ring-rebalance" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712257 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" containerName="swift-ring-rebalance" Sep 29 22:45:57 crc kubenswrapper[4922]: E0929 22:45:57.712267 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb59351d-3f5c-457d-a010-a5f48104cd03" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 
22:45:57.712272 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb59351d-3f5c-457d-a010-a5f48104cd03" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712419 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3d2683e-4488-4dc2-8362-75b9068ce7e6" containerName="mariadb-account-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712431 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dc3f0ae-3193-44e1-9bad-edbbd00a94ea" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712441 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb59351d-3f5c-457d-a010-a5f48104cd03" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712455 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" containerName="swift-ring-rebalance" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712466 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e02eaa01-5408-4268-b8ac-d7bd7917d1c6" containerName="mariadb-database-create" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.712913 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.714991 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.715223 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vvbbl" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.715350 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.716059 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.728764 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fktvf"] Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.871592 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-config-data\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.871684 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-combined-ca-bundle\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.872069 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x76sf\" (UniqueName: \"kubernetes.io/projected/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-kube-api-access-x76sf\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.973820 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-x76sf\" (UniqueName: \"kubernetes.io/projected/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-kube-api-access-x76sf\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.974204 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-config-data\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.974243 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-combined-ca-bundle\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.980692 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-combined-ca-bundle\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:57 crc kubenswrapper[4922]: I0929 22:45:57.990338 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-config-data\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.006285 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x76sf\" (UniqueName: \"kubernetes.io/projected/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-kube-api-access-x76sf\") pod \"keystone-db-sync-fktvf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.061517 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fktvf" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.073764 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-6qjb4"] Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.074733 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.077376 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2j6pm" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.077565 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.100424 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6qjb4"] Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.176439 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4btmw\" (UniqueName: \"kubernetes.io/projected/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-kube-api-access-4btmw\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.176490 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-combined-ca-bundle\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.176568 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-config-data\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.176620 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-db-sync-config-data\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.278327 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-db-sync-config-data\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.278428 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4btmw\" (UniqueName: \"kubernetes.io/projected/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-kube-api-access-4btmw\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.278446 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-combined-ca-bundle\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.278496 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-config-data\") pod 
\"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.288245 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-combined-ca-bundle\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.288318 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-config-data\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.293550 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-db-sync-config-data\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.295036 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4btmw\" (UniqueName: \"kubernetes.io/projected/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-kube-api-access-4btmw\") pod \"glance-db-sync-6qjb4\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.484292 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6qjb4" Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.504291 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-fktvf"] Sep 29 22:45:58 crc kubenswrapper[4922]: W0929 22:45:58.509999 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91ff7529_975a_44c8_a1dd_b7fd1f8708cf.slice/crio-4c48f7d095b61b7dc0f105fa264612cc62f86f82414223dd0121bd55b729f272 WatchSource:0}: Error finding container 4c48f7d095b61b7dc0f105fa264612cc62f86f82414223dd0121bd55b729f272: Status 404 returned error can't find the container with id 4c48f7d095b61b7dc0f105fa264612cc62f86f82414223dd0121bd55b729f272 Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.516920 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"f166640120faeaa707308969f390573411f97a3309e54ac63df05aebb3f19824"} Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.516962 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"394ac56913d9a5c9d5e8f0211780ebf922fd0554782e59a3d6d87d16da29195d"} Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.516972 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"d2f084fa2f64aff150659598e27fe358fc89e0c61c6100a7520978fcf0f7a916"} Sep 29 22:45:58 crc kubenswrapper[4922]: I0929 22:45:58.516982 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"6e52d87702c312bbb2e29a490519b8aa109bb12950e8b0a94d326f1b63f93999"} Sep 29 22:45:59 crc kubenswrapper[4922]: I0929 22:45:59.092743 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6qjb4"] Sep 29 22:45:59 crc kubenswrapper[4922]: W0929 22:45:59.101138 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2dc887c5_8fe3_46a0_af31_64c0b95dfcbf.slice/crio-7cff0f84b5b6049d342df02be257a9f0688c851c22880e0d574daa06594f0eb6 WatchSource:0}: Error finding container 7cff0f84b5b6049d342df02be257a9f0688c851c22880e0d574daa06594f0eb6: Status 404 returned error can't find the container with id 7cff0f84b5b6049d342df02be257a9f0688c851c22880e0d574daa06594f0eb6 Sep 29 22:45:59 crc kubenswrapper[4922]: I0929 22:45:59.527143 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6qjb4" event={"ID":"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf","Type":"ContainerStarted","Data":"7cff0f84b5b6049d342df02be257a9f0688c851c22880e0d574daa06594f0eb6"} Sep 29 22:45:59 crc kubenswrapper[4922]: I0929 22:45:59.528345 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fktvf" event={"ID":"91ff7529-975a-44c8-a1dd-b7fd1f8708cf","Type":"ContainerStarted","Data":"4c48f7d095b61b7dc0f105fa264612cc62f86f82414223dd0121bd55b729f272"} Sep 29 22:46:00 crc kubenswrapper[4922]: I0929 22:46:00.545228 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"aed3f79c8434a0f0105df5fe72412ca9bc5f53d2f122d6b27023c5a8f5c61342"} Sep 29 22:46:00 crc kubenswrapper[4922]: I0929 22:46:00.545959 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"e249fe7a191a944cff40c8c92e3c4958f89cf9fbd1f5d1322ff75e0f69defdff"} Sep 29 22:46:00 crc kubenswrapper[4922]: I0929 22:46:00.545981 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"4675d2c0679cc4f58a6d8737c63a65ad973c3433c64759dcea3d5deff22e30fb"} Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.587882 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"54464affff56c245302f16939d7871865704b43ae97eb183cd35b66f93385f35"} Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.589056 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fktvf" event={"ID":"91ff7529-975a-44c8-a1dd-b7fd1f8708cf","Type":"ContainerStarted","Data":"7a4f35c9026c3a753c4df2911e4a273ed8f0fd7ea9968c172667d0dad986cc9e"} Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.606267 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-fktvf" podStartSLOduration=2.399306664 podStartE2EDuration="6.60624986s" podCreationTimestamp="2025-09-29 22:45:57 +0000 UTC" firstStartedPulling="2025-09-29 22:45:58.512133043 +0000 UTC m=+1162.822421866" lastFinishedPulling="2025-09-29 22:46:02.719076249 +0000 UTC m=+1167.029365062" observedRunningTime="2025-09-29 22:46:03.602938989 +0000 UTC m=+1167.913227842" watchObservedRunningTime="2025-09-29 
22:46:03.60624986 +0000 UTC m=+1167.916538683" Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.868674 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-3d37-account-create-9lzmk"] Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.869981 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3d37-account-create-9lzmk" Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.871951 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.877984 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3d37-account-create-9lzmk"] Sep 29 22:46:03 crc kubenswrapper[4922]: I0929 22:46:03.998790 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7p4v\" (UniqueName: \"kubernetes.io/projected/8880043f-1bff-4e22-9f42-06d44ad027f8-kube-api-access-m7p4v\") pod \"barbican-3d37-account-create-9lzmk\" (UID: \"8880043f-1bff-4e22-9f42-06d44ad027f8\") " pod="openstack/barbican-3d37-account-create-9lzmk" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.067561 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-3b06-account-create-4k8fh"] Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.069138 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3b06-account-create-4k8fh" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.071697 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.078245 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3b06-account-create-4k8fh"] Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.100577 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7p4v\" (UniqueName: \"kubernetes.io/projected/8880043f-1bff-4e22-9f42-06d44ad027f8-kube-api-access-m7p4v\") pod \"barbican-3d37-account-create-9lzmk\" (UID: \"8880043f-1bff-4e22-9f42-06d44ad027f8\") " pod="openstack/barbican-3d37-account-create-9lzmk" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.129169 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7p4v\" (UniqueName: \"kubernetes.io/projected/8880043f-1bff-4e22-9f42-06d44ad027f8-kube-api-access-m7p4v\") pod \"barbican-3d37-account-create-9lzmk\" (UID: \"8880043f-1bff-4e22-9f42-06d44ad027f8\") " pod="openstack/barbican-3d37-account-create-9lzmk" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.193639 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-3d37-account-create-9lzmk" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.202048 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcfzz\" (UniqueName: \"kubernetes.io/projected/7c673b83-04df-456d-b26c-6384ecfb5924-kube-api-access-mcfzz\") pod \"cinder-3b06-account-create-4k8fh\" (UID: \"7c673b83-04df-456d-b26c-6384ecfb5924\") " pod="openstack/cinder-3b06-account-create-4k8fh" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.263745 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-818d-account-create-rgcn6"] Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.265642 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-818d-account-create-rgcn6" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.270649 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-818d-account-create-rgcn6"] Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.275881 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.303445 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcfzz\" (UniqueName: \"kubernetes.io/projected/7c673b83-04df-456d-b26c-6384ecfb5924-kube-api-access-mcfzz\") pod \"cinder-3b06-account-create-4k8fh\" (UID: \"7c673b83-04df-456d-b26c-6384ecfb5924\") " pod="openstack/cinder-3b06-account-create-4k8fh" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.328836 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcfzz\" (UniqueName: \"kubernetes.io/projected/7c673b83-04df-456d-b26c-6384ecfb5924-kube-api-access-mcfzz\") pod \"cinder-3b06-account-create-4k8fh\" (UID: \"7c673b83-04df-456d-b26c-6384ecfb5924\") " pod="openstack/cinder-3b06-account-create-4k8fh" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.392000 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-3b06-account-create-4k8fh" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.406089 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ln5bp\" (UniqueName: \"kubernetes.io/projected/818de8af-e1df-4003-8a42-c0f067a7a0cc-kube-api-access-ln5bp\") pod \"neutron-818d-account-create-rgcn6\" (UID: \"818de8af-e1df-4003-8a42-c0f067a7a0cc\") " pod="openstack/neutron-818d-account-create-rgcn6" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.507597 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ln5bp\" (UniqueName: \"kubernetes.io/projected/818de8af-e1df-4003-8a42-c0f067a7a0cc-kube-api-access-ln5bp\") pod \"neutron-818d-account-create-rgcn6\" (UID: \"818de8af-e1df-4003-8a42-c0f067a7a0cc\") " pod="openstack/neutron-818d-account-create-rgcn6" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.537247 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ln5bp\" (UniqueName: \"kubernetes.io/projected/818de8af-e1df-4003-8a42-c0f067a7a0cc-kube-api-access-ln5bp\") pod \"neutron-818d-account-create-rgcn6\" (UID: \"818de8af-e1df-4003-8a42-c0f067a7a0cc\") " pod="openstack/neutron-818d-account-create-rgcn6" Sep 29 22:46:04 crc kubenswrapper[4922]: I0929 22:46:04.602769 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-818d-account-create-rgcn6" Sep 29 22:46:05 crc kubenswrapper[4922]: I0929 22:46:05.635822 4922 generic.go:334] "Generic (PLEG): container finished" podID="91ff7529-975a-44c8-a1dd-b7fd1f8708cf" containerID="7a4f35c9026c3a753c4df2911e4a273ed8f0fd7ea9968c172667d0dad986cc9e" exitCode=0 Sep 29 22:46:05 crc kubenswrapper[4922]: I0929 22:46:05.635890 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fktvf" event={"ID":"91ff7529-975a-44c8-a1dd-b7fd1f8708cf","Type":"ContainerDied","Data":"7a4f35c9026c3a753c4df2911e4a273ed8f0fd7ea9968c172667d0dad986cc9e"} Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.429620 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-fktvf" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.565086 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-combined-ca-bundle\") pod \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.565160 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x76sf\" (UniqueName: \"kubernetes.io/projected/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-kube-api-access-x76sf\") pod \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.565191 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-config-data\") pod \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\" (UID: \"91ff7529-975a-44c8-a1dd-b7fd1f8708cf\") " Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.573671 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-kube-api-access-x76sf" (OuterVolumeSpecName: "kube-api-access-x76sf") pod "91ff7529-975a-44c8-a1dd-b7fd1f8708cf" (UID: "91ff7529-975a-44c8-a1dd-b7fd1f8708cf"). InnerVolumeSpecName "kube-api-access-x76sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.604206 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91ff7529-975a-44c8-a1dd-b7fd1f8708cf" (UID: "91ff7529-975a-44c8-a1dd-b7fd1f8708cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.618568 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-config-data" (OuterVolumeSpecName: "config-data") pod "91ff7529-975a-44c8-a1dd-b7fd1f8708cf" (UID: "91ff7529-975a-44c8-a1dd-b7fd1f8708cf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.671541 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.671586 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x76sf\" (UniqueName: \"kubernetes.io/projected/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-kube-api-access-x76sf\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.671603 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91ff7529-975a-44c8-a1dd-b7fd1f8708cf-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.688000 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-fktvf" event={"ID":"91ff7529-975a-44c8-a1dd-b7fd1f8708cf","Type":"ContainerDied","Data":"4c48f7d095b61b7dc0f105fa264612cc62f86f82414223dd0121bd55b729f272"} Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.688294 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c48f7d095b61b7dc0f105fa264612cc62f86f82414223dd0121bd55b729f272" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.688038 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-fktvf" Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.868148 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-3d37-account-create-9lzmk"] Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.984066 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-818d-account-create-rgcn6"] Sep 29 22:46:10 crc kubenswrapper[4922]: W0929 22:46:10.992049 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod818de8af_e1df_4003_8a42_c0f067a7a0cc.slice/crio-163fbc442e51614057a6eaee08c9ae8377d58d94b60c405cc64b870c1503b49d WatchSource:0}: Error finding container 163fbc442e51614057a6eaee08c9ae8377d58d94b60c405cc64b870c1503b49d: Status 404 returned error can't find the container with id 163fbc442e51614057a6eaee08c9ae8377d58d94b60c405cc64b870c1503b49d Sep 29 22:46:10 crc kubenswrapper[4922]: I0929 22:46:10.993028 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3b06-account-create-4k8fh"] Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.705547 4922 generic.go:334] "Generic (PLEG): container finished" podID="8880043f-1bff-4e22-9f42-06d44ad027f8" containerID="6a35ad12e5e174b24a23fdc83a0dc7b755b27cbac17636e646224c0b043489ff" exitCode=0 Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.710060 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3d37-account-create-9lzmk" event={"ID":"8880043f-1bff-4e22-9f42-06d44ad027f8","Type":"ContainerDied","Data":"6a35ad12e5e174b24a23fdc83a0dc7b755b27cbac17636e646224c0b043489ff"} Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.710106 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3d37-account-create-9lzmk" event={"ID":"8880043f-1bff-4e22-9f42-06d44ad027f8","Type":"ContainerStarted","Data":"0807e3bedc6df209245796c646365a81d927c461f7ef4c14ca8f199809c42ac2"} Sep 29 22:46:11 crc 
kubenswrapper[4922]: I0929 22:46:11.717659 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-t5dkr"] Sep 29 22:46:11 crc kubenswrapper[4922]: E0929 22:46:11.718030 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91ff7529-975a-44c8-a1dd-b7fd1f8708cf" containerName="keystone-db-sync" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.718040 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="91ff7529-975a-44c8-a1dd-b7fd1f8708cf" containerName="keystone-db-sync" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.718187 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="91ff7529-975a-44c8-a1dd-b7fd1f8708cf" containerName="keystone-db-sync" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.719034 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.741707 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-t5dkr"] Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.745467 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-bzj7x"] Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.746686 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.748533 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.749181 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"6c064216482398df313773fb9964e1b8586650597558efb0a3a312e7dde29596"} Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.751759 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.751943 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vvbbl" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.752050 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.755987 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3b06-account-create-4k8fh" event={"ID":"7c673b83-04df-456d-b26c-6384ecfb5924","Type":"ContainerStarted","Data":"57ee8be2a2838ea0f44079b410352c4c9e3e3715c82c96b4a7b5124eccabbe06"} Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.756037 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3b06-account-create-4k8fh" event={"ID":"7c673b83-04df-456d-b26c-6384ecfb5924","Type":"ContainerStarted","Data":"b1b5fb7e60c5bfb4d1cfe86d326c0c47740523b9835bb80c2fb862c4be7d2521"} Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.762139 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bzj7x"] Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.763900 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6qjb4" event={"ID":"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf","Type":"ContainerStarted","Data":"fb91a14d5de3eb8dcd551bdc88c45ee6cbe872536cded7b69dbe38d418e4ae80"} Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 
22:46:11.770628 4922 generic.go:334] "Generic (PLEG): container finished" podID="818de8af-e1df-4003-8a42-c0f067a7a0cc" containerID="92e45f2986b77c1ac0d506bd56133b91677b767c782a7f4355197ce4f97ee6fc" exitCode=0 Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.770662 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-818d-account-create-rgcn6" event={"ID":"818de8af-e1df-4003-8a42-c0f067a7a0cc","Type":"ContainerDied","Data":"92e45f2986b77c1ac0d506bd56133b91677b767c782a7f4355197ce4f97ee6fc"} Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.770681 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-818d-account-create-rgcn6" event={"ID":"818de8af-e1df-4003-8a42-c0f067a7a0cc","Type":"ContainerStarted","Data":"163fbc442e51614057a6eaee08c9ae8377d58d94b60c405cc64b870c1503b49d"} Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797015 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797079 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txvjp\" (UniqueName: \"kubernetes.io/projected/6692bdb0-7f7c-4540-9c38-55b671525931-kube-api-access-txvjp\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797100 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-config-data\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797121 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7hjc\" (UniqueName: \"kubernetes.io/projected/87541b85-2c02-42b6-a2fa-94730f13932c-kube-api-access-k7hjc\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797137 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-combined-ca-bundle\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797161 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-scripts\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797220 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-dns-svc\") pod 
\"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-fernet-keys\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797271 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-config\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797311 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.797337 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-credential-keys\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.828649 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-3b06-account-create-4k8fh" podStartSLOduration=7.828632901 podStartE2EDuration="7.828632901s" podCreationTimestamp="2025-09-29 22:46:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:11.821014015 +0000 UTC m=+1176.131302828" watchObservedRunningTime="2025-09-29 22:46:11.828632901 +0000 UTC m=+1176.138921714" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904382 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txvjp\" (UniqueName: \"kubernetes.io/projected/6692bdb0-7f7c-4540-9c38-55b671525931-kube-api-access-txvjp\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904433 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-config-data\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904470 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7hjc\" (UniqueName: \"kubernetes.io/projected/87541b85-2c02-42b6-a2fa-94730f13932c-kube-api-access-k7hjc\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904491 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-combined-ca-bundle\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904544 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-scripts\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904612 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904637 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-fernet-keys\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904659 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-config\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904695 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904722 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-credential-keys\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.904776 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.905919 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-config\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.906748 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-6qjb4" podStartSLOduration=2.4371400899999998 
podStartE2EDuration="13.906728204s" podCreationTimestamp="2025-09-29 22:45:58 +0000 UTC" firstStartedPulling="2025-09-29 22:45:59.104113312 +0000 UTC m=+1163.414402155" lastFinishedPulling="2025-09-29 22:46:10.573701456 +0000 UTC m=+1174.883990269" observedRunningTime="2025-09-29 22:46:11.881816404 +0000 UTC m=+1176.192105217" watchObservedRunningTime="2025-09-29 22:46:11.906728204 +0000 UTC m=+1176.217017017" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.907131 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-dns-svc\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.913102 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.913918 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.923118 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-scripts\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.931457 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-config-data\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.932121 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-credential-keys\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.933725 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-combined-ca-bundle\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.941441 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txvjp\" (UniqueName: \"kubernetes.io/projected/6692bdb0-7f7c-4540-9c38-55b671525931-kube-api-access-txvjp\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:11 crc kubenswrapper[4922]: I0929 22:46:11.964987 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-k7hjc\" (UniqueName: \"kubernetes.io/projected/87541b85-2c02-42b6-a2fa-94730f13932c-kube-api-access-k7hjc\") pod \"dnsmasq-dns-5c9d85d47c-t5dkr\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.003105 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-fernet-keys\") pod \"keystone-bootstrap-bzj7x\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.044444 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.053609 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.060976 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.070716 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.072752 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.072779 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.072915 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.145201 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2jzl\" (UniqueName: \"kubernetes.io/projected/9eda432e-f3e4-4a47-ab28-7175f2e28034-kube-api-access-v2jzl\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.145263 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.145328 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-config-data\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.145344 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.145372 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-scripts\") 
pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.145415 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-log-httpd\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.145432 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-run-httpd\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.209948 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-t5dkr"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.242433 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-zvnng"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.243458 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.246950 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-config-data\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.246986 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.247020 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-scripts\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.247059 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-log-httpd\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.247085 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-run-httpd\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.247120 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2jzl\" (UniqueName: \"kubernetes.io/projected/9eda432e-f3e4-4a47-ab28-7175f2e28034-kube-api-access-v2jzl\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.247147 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.252535 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.254509 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zvnng"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.269542 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-vv2bx" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.270106 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.285276 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-config-data\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.286883 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.292735 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-log-httpd\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.292931 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-run-httpd\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.296148 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.304888 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-2d7z8"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.306180 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.307419 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-scripts\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.320037 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2jzl\" (UniqueName: \"kubernetes.io/projected/9eda432e-f3e4-4a47-ab28-7175f2e28034-kube-api-access-v2jzl\") pod \"ceilometer-0\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351288 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351351 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-config\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351375 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-scripts\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351417 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtw5p\" (UniqueName: \"kubernetes.io/projected/45cdffc3-9762-433a-b1d8-7e6af001c0ec-kube-api-access-vtw5p\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351434 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351471 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-combined-ca-bundle\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351493 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-config-data\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 
22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351524 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45cdffc3-9762-433a-b1d8-7e6af001c0ec-logs\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351543 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.351563 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmgd4\" (UniqueName: \"kubernetes.io/projected/cbe053b9-bafb-4a8b-b495-f45b790f531f-kube-api-access-lmgd4\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.352653 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-2d7z8"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.424815 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455214 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45cdffc3-9762-433a-b1d8-7e6af001c0ec-logs\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455304 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmgd4\" (UniqueName: \"kubernetes.io/projected/cbe053b9-bafb-4a8b-b495-f45b790f531f-kube-api-access-lmgd4\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455354 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455406 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-config\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455426 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-scripts\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455466 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455484 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtw5p\" (UniqueName: \"kubernetes.io/projected/45cdffc3-9762-433a-b1d8-7e6af001c0ec-kube-api-access-vtw5p\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455529 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-combined-ca-bundle\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.455550 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-config-data\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.457027 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45cdffc3-9762-433a-b1d8-7e6af001c0ec-logs\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.457056 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-nb\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.462011 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-combined-ca-bundle\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.462802 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-config\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.469073 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-dns-svc\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: 
\"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.469938 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-config-data\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.478216 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-sb\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.478872 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmgd4\" (UniqueName: \"kubernetes.io/projected/cbe053b9-bafb-4a8b-b495-f45b790f531f-kube-api-access-lmgd4\") pod \"dnsmasq-dns-6ffb94d8ff-2d7z8\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.483451 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtw5p\" (UniqueName: \"kubernetes.io/projected/45cdffc3-9762-433a-b1d8-7e6af001c0ec-kube-api-access-vtw5p\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.499428 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-scripts\") pod \"placement-db-sync-zvnng\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.589833 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.641154 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.750148 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-t5dkr"] Sep 29 22:46:12 crc kubenswrapper[4922]: W0929 22:46:12.792340 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87541b85_2c02_42b6_a2fa_94730f13932c.slice/crio-b6cfdc4b0bbeb53495badc253a6930fcd883a2a01a4b09120b1a9a545c956480 WatchSource:0}: Error finding container b6cfdc4b0bbeb53495badc253a6930fcd883a2a01a4b09120b1a9a545c956480: Status 404 returned error can't find the container with id b6cfdc4b0bbeb53495badc253a6930fcd883a2a01a4b09120b1a9a545c956480 Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.831935 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"1f217165de12b63c91e1fbd871ad07d3070b8407a3d9750bde397f3c7a1cc356"} Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.831986 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"7dc1ecd4e9d792ad830b6b3cddec0aca87a6fc32dfad2067e4fa602b228af523"} Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.836319 4922 generic.go:334] "Generic (PLEG): container finished" podID="7c673b83-04df-456d-b26c-6384ecfb5924" containerID="57ee8be2a2838ea0f44079b410352c4c9e3e3715c82c96b4a7b5124eccabbe06" exitCode=0 Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.836441 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3b06-account-create-4k8fh" event={"ID":"7c673b83-04df-456d-b26c-6384ecfb5924","Type":"ContainerDied","Data":"57ee8be2a2838ea0f44079b410352c4c9e3e3715c82c96b4a7b5124eccabbe06"} Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.916836 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:46:12 crc kubenswrapper[4922]: I0929 22:46:12.936436 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bzj7x"] Sep 29 22:46:13 crc kubenswrapper[4922]: I0929 22:46:13.454600 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-2d7z8"] Sep 29 22:46:13 crc kubenswrapper[4922]: I0929 22:46:13.625037 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zvnng"] Sep 29 22:46:13 crc kubenswrapper[4922]: I0929 22:46:13.868810 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" event={"ID":"87541b85-2c02-42b6-a2fa-94730f13932c","Type":"ContainerStarted","Data":"b6cfdc4b0bbeb53495badc253a6930fcd883a2a01a4b09120b1a9a545c956480"} Sep 29 22:46:13 crc kubenswrapper[4922]: I0929 22:46:13.874943 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerStarted","Data":"bbb9fcdd1a8a609ce08d760176b4cf9c83e327a574971912e69da294807031dc"} Sep 29 22:46:13 crc kubenswrapper[4922]: I0929 22:46:13.876808 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bzj7x" event={"ID":"6692bdb0-7f7c-4540-9c38-55b671525931","Type":"ContainerStarted","Data":"718d8ff26d31e148122cd49ac502310a831d77212d22b696a0e44e807421054d"} Sep 29 22:46:13 crc kubenswrapper[4922]: I0929 
22:46:13.878814 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" event={"ID":"cbe053b9-bafb-4a8b-b495-f45b790f531f","Type":"ContainerStarted","Data":"c9a282ea3598309095430e6344961bda50af2667e739a10a0556853554620a4c"} Sep 29 22:46:14 crc kubenswrapper[4922]: I0929 22:46:14.688475 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:46:16 crc kubenswrapper[4922]: W0929 22:46:16.225674 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45cdffc3_9762_433a_b1d8_7e6af001c0ec.slice/crio-f3c84e701be060d12ae2ff8b19fb14244a6028a5f870beb02563614f47f22df7 WatchSource:0}: Error finding container f3c84e701be060d12ae2ff8b19fb14244a6028a5f870beb02563614f47f22df7: Status 404 returned error can't find the container with id f3c84e701be060d12ae2ff8b19fb14244a6028a5f870beb02563614f47f22df7 Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.347244 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-3d37-account-create-9lzmk" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.351706 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3b06-account-create-4k8fh" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.357001 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-818d-account-create-rgcn6" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.423958 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7p4v\" (UniqueName: \"kubernetes.io/projected/8880043f-1bff-4e22-9f42-06d44ad027f8-kube-api-access-m7p4v\") pod \"8880043f-1bff-4e22-9f42-06d44ad027f8\" (UID: \"8880043f-1bff-4e22-9f42-06d44ad027f8\") " Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.424111 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcfzz\" (UniqueName: \"kubernetes.io/projected/7c673b83-04df-456d-b26c-6384ecfb5924-kube-api-access-mcfzz\") pod \"7c673b83-04df-456d-b26c-6384ecfb5924\" (UID: \"7c673b83-04df-456d-b26c-6384ecfb5924\") " Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.424191 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ln5bp\" (UniqueName: \"kubernetes.io/projected/818de8af-e1df-4003-8a42-c0f067a7a0cc-kube-api-access-ln5bp\") pod \"818de8af-e1df-4003-8a42-c0f067a7a0cc\" (UID: \"818de8af-e1df-4003-8a42-c0f067a7a0cc\") " Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.434377 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/818de8af-e1df-4003-8a42-c0f067a7a0cc-kube-api-access-ln5bp" (OuterVolumeSpecName: "kube-api-access-ln5bp") pod "818de8af-e1df-4003-8a42-c0f067a7a0cc" (UID: "818de8af-e1df-4003-8a42-c0f067a7a0cc"). InnerVolumeSpecName "kube-api-access-ln5bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.435215 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c673b83-04df-456d-b26c-6384ecfb5924-kube-api-access-mcfzz" (OuterVolumeSpecName: "kube-api-access-mcfzz") pod "7c673b83-04df-456d-b26c-6384ecfb5924" (UID: "7c673b83-04df-456d-b26c-6384ecfb5924"). InnerVolumeSpecName "kube-api-access-mcfzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.445739 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8880043f-1bff-4e22-9f42-06d44ad027f8-kube-api-access-m7p4v" (OuterVolumeSpecName: "kube-api-access-m7p4v") pod "8880043f-1bff-4e22-9f42-06d44ad027f8" (UID: "8880043f-1bff-4e22-9f42-06d44ad027f8"). InnerVolumeSpecName "kube-api-access-m7p4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.525934 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7p4v\" (UniqueName: \"kubernetes.io/projected/8880043f-1bff-4e22-9f42-06d44ad027f8-kube-api-access-m7p4v\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.525966 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcfzz\" (UniqueName: \"kubernetes.io/projected/7c673b83-04df-456d-b26c-6384ecfb5924-kube-api-access-mcfzz\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.525979 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ln5bp\" (UniqueName: \"kubernetes.io/projected/818de8af-e1df-4003-8a42-c0f067a7a0cc-kube-api-access-ln5bp\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.914277 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zvnng" event={"ID":"45cdffc3-9762-433a-b1d8-7e6af001c0ec","Type":"ContainerStarted","Data":"f3c84e701be060d12ae2ff8b19fb14244a6028a5f870beb02563614f47f22df7"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.916518 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-3d37-account-create-9lzmk" event={"ID":"8880043f-1bff-4e22-9f42-06d44ad027f8","Type":"ContainerDied","Data":"0807e3bedc6df209245796c646365a81d927c461f7ef4c14ca8f199809c42ac2"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.916552 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-3d37-account-create-9lzmk" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.916559 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0807e3bedc6df209245796c646365a81d927c461f7ef4c14ca8f199809c42ac2" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.918109 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bzj7x" event={"ID":"6692bdb0-7f7c-4540-9c38-55b671525931","Type":"ContainerStarted","Data":"0bedfca35eca2b938823e8ebdd743ac692fe48cb5465b4698b6f9f2ae56de9be"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.919679 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" event={"ID":"cbe053b9-bafb-4a8b-b495-f45b790f531f","Type":"ContainerStarted","Data":"7d6d759cc5fdfc5469163b3c5e8412fe29a3b506b3981463051c1f20a3c7526d"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.932786 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"e41c7b951d5523f493d3e44c422eb2f476674ac694ba6e3a443bb314f8068bab"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.942145 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3b06-account-create-4k8fh" event={"ID":"7c673b83-04df-456d-b26c-6384ecfb5924","Type":"ContainerDied","Data":"b1b5fb7e60c5bfb4d1cfe86d326c0c47740523b9835bb80c2fb862c4be7d2521"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.942204 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3b06-account-create-4k8fh" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.942242 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1b5fb7e60c5bfb4d1cfe86d326c0c47740523b9835bb80c2fb862c4be7d2521" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.943830 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" event={"ID":"87541b85-2c02-42b6-a2fa-94730f13932c","Type":"ContainerStarted","Data":"5774ece92513d29620b4bc50a4f72da4209c9ae1577c81a571a1eb7b5d636a4c"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.953436 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-818d-account-create-rgcn6" event={"ID":"818de8af-e1df-4003-8a42-c0f067a7a0cc","Type":"ContainerDied","Data":"163fbc442e51614057a6eaee08c9ae8377d58d94b60c405cc64b870c1503b49d"} Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.953493 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="163fbc442e51614057a6eaee08c9ae8377d58d94b60c405cc64b870c1503b49d" Sep 29 22:46:16 crc kubenswrapper[4922]: I0929 22:46:16.953628 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-818d-account-create-rgcn6" Sep 29 22:46:17 crc kubenswrapper[4922]: I0929 22:46:17.963901 4922 generic.go:334] "Generic (PLEG): container finished" podID="87541b85-2c02-42b6-a2fa-94730f13932c" containerID="5774ece92513d29620b4bc50a4f72da4209c9ae1577c81a571a1eb7b5d636a4c" exitCode=0 Sep 29 22:46:17 crc kubenswrapper[4922]: I0929 22:46:17.964196 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" event={"ID":"87541b85-2c02-42b6-a2fa-94730f13932c","Type":"ContainerDied","Data":"5774ece92513d29620b4bc50a4f72da4209c9ae1577c81a571a1eb7b5d636a4c"} Sep 29 22:46:17 crc kubenswrapper[4922]: I0929 22:46:17.965327 4922 generic.go:334] "Generic (PLEG): container finished" podID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerID="7d6d759cc5fdfc5469163b3c5e8412fe29a3b506b3981463051c1f20a3c7526d" exitCode=0 Sep 29 22:46:17 crc kubenswrapper[4922]: I0929 22:46:17.965431 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" event={"ID":"cbe053b9-bafb-4a8b-b495-f45b790f531f","Type":"ContainerDied","Data":"7d6d759cc5fdfc5469163b3c5e8412fe29a3b506b3981463051c1f20a3c7526d"} Sep 29 22:46:17 crc kubenswrapper[4922]: I0929 22:46:17.977326 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"27aeadd45b13c851d87c45f05a21adf10459ae93d03fae69b6ab3347a3cd7d2b"} Sep 29 22:46:17 crc kubenswrapper[4922]: I0929 22:46:17.977365 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"9c2b949ae2010cd19044ec6c16936ffd099b6ea65673b3704a021c3323514b40"} Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.044788 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-bzj7x" podStartSLOduration=7.044768462 podStartE2EDuration="7.044768462s" podCreationTimestamp="2025-09-29 22:46:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:18.019206536 +0000 UTC m=+1182.329495349" watchObservedRunningTime="2025-09-29 22:46:18.044768462 +0000 UTC m=+1182.355057275" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.236364 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.309857 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7hjc\" (UniqueName: \"kubernetes.io/projected/87541b85-2c02-42b6-a2fa-94730f13932c-kube-api-access-k7hjc\") pod \"87541b85-2c02-42b6-a2fa-94730f13932c\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.309941 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-sb\") pod \"87541b85-2c02-42b6-a2fa-94730f13932c\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.310004 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-config\") pod \"87541b85-2c02-42b6-a2fa-94730f13932c\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.310038 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-nb\") pod \"87541b85-2c02-42b6-a2fa-94730f13932c\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.310111 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-dns-svc\") pod \"87541b85-2c02-42b6-a2fa-94730f13932c\" (UID: \"87541b85-2c02-42b6-a2fa-94730f13932c\") " Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.332838 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87541b85-2c02-42b6-a2fa-94730f13932c-kube-api-access-k7hjc" (OuterVolumeSpecName: "kube-api-access-k7hjc") pod "87541b85-2c02-42b6-a2fa-94730f13932c" (UID: "87541b85-2c02-42b6-a2fa-94730f13932c"). InnerVolumeSpecName "kube-api-access-k7hjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.346114 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "87541b85-2c02-42b6-a2fa-94730f13932c" (UID: "87541b85-2c02-42b6-a2fa-94730f13932c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.354757 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "87541b85-2c02-42b6-a2fa-94730f13932c" (UID: "87541b85-2c02-42b6-a2fa-94730f13932c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.358752 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "87541b85-2c02-42b6-a2fa-94730f13932c" (UID: "87541b85-2c02-42b6-a2fa-94730f13932c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.364576 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-config" (OuterVolumeSpecName: "config") pod "87541b85-2c02-42b6-a2fa-94730f13932c" (UID: "87541b85-2c02-42b6-a2fa-94730f13932c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.412210 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7hjc\" (UniqueName: \"kubernetes.io/projected/87541b85-2c02-42b6-a2fa-94730f13932c-kube-api-access-k7hjc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.412441 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.412529 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.412610 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.412661 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87541b85-2c02-42b6-a2fa-94730f13932c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.987087 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" event={"ID":"cbe053b9-bafb-4a8b-b495-f45b790f531f","Type":"ContainerStarted","Data":"5f262e5f7ffe6974ffe4a43d0357a32dd3208a0c8723f7593d68b86100be39b6"} Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.988443 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.994945 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerStarted","Data":"d96b9721a809407b59045e31403c469338494e50e97df48dd1a0aa74503cb5bd"} Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.996456 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" event={"ID":"87541b85-2c02-42b6-a2fa-94730f13932c","Type":"ContainerDied","Data":"b6cfdc4b0bbeb53495badc253a6930fcd883a2a01a4b09120b1a9a545c956480"} Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.996487 4922 scope.go:117] "RemoveContainer" containerID="5774ece92513d29620b4bc50a4f72da4209c9ae1577c81a571a1eb7b5d636a4c" Sep 29 22:46:18 crc kubenswrapper[4922]: I0929 22:46:18.996605 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9d85d47c-t5dkr" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.026191 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" podStartSLOduration=7.02617546 podStartE2EDuration="7.02617546s" podCreationTimestamp="2025-09-29 22:46:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:19.019854946 +0000 UTC m=+1183.330143769" watchObservedRunningTime="2025-09-29 22:46:19.02617546 +0000 UTC m=+1183.336464273" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.057096 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=25.308893258 podStartE2EDuration="40.057078377s" podCreationTimestamp="2025-09-29 22:45:39 +0000 UTC" firstStartedPulling="2025-09-29 22:45:56.600515742 +0000 UTC m=+1160.910804555" lastFinishedPulling="2025-09-29 22:46:11.348700821 +0000 UTC m=+1175.658989674" observedRunningTime="2025-09-29 22:46:19.054160285 +0000 UTC m=+1183.364449108" watchObservedRunningTime="2025-09-29 22:46:19.057078377 +0000 UTC m=+1183.367367180" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.088487 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-prv7k"] Sep 29 22:46:19 crc kubenswrapper[4922]: E0929 22:46:19.089170 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8880043f-1bff-4e22-9f42-06d44ad027f8" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089232 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8880043f-1bff-4e22-9f42-06d44ad027f8" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: E0929 22:46:19.089291 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="818de8af-e1df-4003-8a42-c0f067a7a0cc" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089336 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="818de8af-e1df-4003-8a42-c0f067a7a0cc" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: E0929 22:46:19.089431 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87541b85-2c02-42b6-a2fa-94730f13932c" containerName="init" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089481 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="87541b85-2c02-42b6-a2fa-94730f13932c" containerName="init" Sep 29 22:46:19 crc kubenswrapper[4922]: E0929 22:46:19.089538 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c673b83-04df-456d-b26c-6384ecfb5924" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089584 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c673b83-04df-456d-b26c-6384ecfb5924" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089770 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c673b83-04df-456d-b26c-6384ecfb5924" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089828 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="87541b85-2c02-42b6-a2fa-94730f13932c" containerName="init" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089883 4922 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="818de8af-e1df-4003-8a42-c0f067a7a0cc" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.089944 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8880043f-1bff-4e22-9f42-06d44ad027f8" containerName="mariadb-account-create" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.090620 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.094902 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nkbxk" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.095200 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.115137 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-prv7k"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.135894 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-t5dkr"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.142215 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9d85d47c-t5dkr"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.224472 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngfgk\" (UniqueName: \"kubernetes.io/projected/f7a17d6c-65b4-4006-8d06-9942b01955d1-kube-api-access-ngfgk\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.224571 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-combined-ca-bundle\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.224634 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-db-sync-config-data\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.326084 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-combined-ca-bundle\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.326164 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-db-sync-config-data\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.326219 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngfgk\" (UniqueName: \"kubernetes.io/projected/f7a17d6c-65b4-4006-8d06-9942b01955d1-kube-api-access-ngfgk\") 
pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.344461 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-2d7z8"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.369879 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngfgk\" (UniqueName: \"kubernetes.io/projected/f7a17d6c-65b4-4006-8d06-9942b01955d1-kube-api-access-ngfgk\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.370468 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-db-sync-config-data\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.374668 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-combined-ca-bundle\") pod \"barbican-db-sync-prv7k\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.379560 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-45t4c"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.381402 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.384868 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.388499 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-45t4c"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.410485 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.445512 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-782mg"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.446574 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.448216 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.448673 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zk68r" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.448890 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.497467 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-782mg"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529013 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-scripts\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529086 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-db-sync-config-data\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529123 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-config-data\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529150 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529193 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgbvj\" (UniqueName: \"kubernetes.io/projected/830f2021-190b-481c-b306-3fd4e588eb9f-kube-api-access-xgbvj\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529213 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529229 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-config\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 
22:46:19.529248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529262 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/43650a1d-3702-40e1-b4ef-2cc2f2343c28-etc-machine-id\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529305 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529319 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-combined-ca-bundle\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.529338 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qrwf\" (UniqueName: \"kubernetes.io/projected/43650a1d-3702-40e1-b4ef-2cc2f2343c28-kube-api-access-2qrwf\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.537157 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-b7zw5"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.538282 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.542881 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.543028 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.543128 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-wxrp8" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.559218 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-b7zw5"] Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631119 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkq4c\" (UniqueName: \"kubernetes.io/projected/43fe2c56-fd4b-4e01-9550-49d15df8264f-kube-api-access-zkq4c\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631375 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-scripts\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631483 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-db-sync-config-data\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631581 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-config-data\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631672 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631799 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-combined-ca-bundle\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631886 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-config\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.631978 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xgbvj\" (UniqueName: \"kubernetes.io/projected/830f2021-190b-481c-b306-3fd4e588eb9f-kube-api-access-xgbvj\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632073 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632162 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-config\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632247 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632322 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/43650a1d-3702-40e1-b4ef-2cc2f2343c28-etc-machine-id\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632456 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632528 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-combined-ca-bundle\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632604 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632617 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qrwf\" (UniqueName: \"kubernetes.io/projected/43650a1d-3702-40e1-b4ef-2cc2f2343c28-kube-api-access-2qrwf\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.632746 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/43650a1d-3702-40e1-b4ef-2cc2f2343c28-etc-machine-id\") pod 
\"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.633266 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-config\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.633577 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.636301 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.636377 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-scripts\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.636315 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.636789 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-db-sync-config-data\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.636901 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-combined-ca-bundle\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.647873 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgbvj\" (UniqueName: \"kubernetes.io/projected/830f2021-190b-481c-b306-3fd4e588eb9f-kube-api-access-xgbvj\") pod \"dnsmasq-dns-fcfdd6f9f-45t4c\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.664840 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qrwf\" (UniqueName: \"kubernetes.io/projected/43650a1d-3702-40e1-b4ef-2cc2f2343c28-kube-api-access-2qrwf\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.665065 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-config-data\") pod \"cinder-db-sync-782mg\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.744644 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-combined-ca-bundle\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.744711 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-config\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.744955 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkq4c\" (UniqueName: \"kubernetes.io/projected/43fe2c56-fd4b-4e01-9550-49d15df8264f-kube-api-access-zkq4c\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.747626 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.756741 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-combined-ca-bundle\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.757732 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-config\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.761089 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-782mg" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.764886 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkq4c\" (UniqueName: \"kubernetes.io/projected/43fe2c56-fd4b-4e01-9550-49d15df8264f-kube-api-access-zkq4c\") pod \"neutron-db-sync-b7zw5\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:19 crc kubenswrapper[4922]: I0929 22:46:19.861315 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:20 crc kubenswrapper[4922]: I0929 22:46:20.432754 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87541b85-2c02-42b6-a2fa-94730f13932c" path="/var/lib/kubelet/pods/87541b85-2c02-42b6-a2fa-94730f13932c/volumes" Sep 29 22:46:21 crc kubenswrapper[4922]: I0929 22:46:21.015327 4922 generic.go:334] "Generic (PLEG): container finished" podID="6692bdb0-7f7c-4540-9c38-55b671525931" containerID="0bedfca35eca2b938823e8ebdd743ac692fe48cb5465b4698b6f9f2ae56de9be" exitCode=0 Sep 29 22:46:21 crc kubenswrapper[4922]: I0929 22:46:21.015407 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bzj7x" event={"ID":"6692bdb0-7f7c-4540-9c38-55b671525931","Type":"ContainerDied","Data":"0bedfca35eca2b938823e8ebdd743ac692fe48cb5465b4698b6f9f2ae56de9be"} Sep 29 22:46:21 crc kubenswrapper[4922]: I0929 22:46:21.015580 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" podUID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerName="dnsmasq-dns" containerID="cri-o://5f262e5f7ffe6974ffe4a43d0357a32dd3208a0c8723f7593d68b86100be39b6" gracePeriod=10 Sep 29 22:46:22 crc kubenswrapper[4922]: I0929 22:46:22.031458 4922 generic.go:334] "Generic (PLEG): container finished" podID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerID="5f262e5f7ffe6974ffe4a43d0357a32dd3208a0c8723f7593d68b86100be39b6" exitCode=0 Sep 29 22:46:22 crc kubenswrapper[4922]: I0929 22:46:22.031508 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" event={"ID":"cbe053b9-bafb-4a8b-b495-f45b790f531f","Type":"ContainerDied","Data":"5f262e5f7ffe6974ffe4a43d0357a32dd3208a0c8723f7593d68b86100be39b6"} Sep 29 22:46:22 crc kubenswrapper[4922]: I0929 22:46:22.033714 4922 generic.go:334] "Generic (PLEG): container finished" podID="2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" containerID="fb91a14d5de3eb8dcd551bdc88c45ee6cbe872536cded7b69dbe38d418e4ae80" exitCode=0 Sep 29 22:46:22 crc kubenswrapper[4922]: I0929 22:46:22.033880 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6qjb4" event={"ID":"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf","Type":"ContainerDied","Data":"fb91a14d5de3eb8dcd551bdc88c45ee6cbe872536cded7b69dbe38d418e4ae80"} Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.055736 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bzj7x" event={"ID":"6692bdb0-7f7c-4540-9c38-55b671525931","Type":"ContainerDied","Data":"718d8ff26d31e148122cd49ac502310a831d77212d22b696a0e44e807421054d"} Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.056253 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="718d8ff26d31e148122cd49ac502310a831d77212d22b696a0e44e807421054d" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.075684 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.219890 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-credential-keys\") pod \"6692bdb0-7f7c-4540-9c38-55b671525931\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.219930 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-combined-ca-bundle\") pod \"6692bdb0-7f7c-4540-9c38-55b671525931\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.219983 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txvjp\" (UniqueName: \"kubernetes.io/projected/6692bdb0-7f7c-4540-9c38-55b671525931-kube-api-access-txvjp\") pod \"6692bdb0-7f7c-4540-9c38-55b671525931\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.220011 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-fernet-keys\") pod \"6692bdb0-7f7c-4540-9c38-55b671525931\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.220076 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-scripts\") pod \"6692bdb0-7f7c-4540-9c38-55b671525931\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.220154 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-config-data\") pod \"6692bdb0-7f7c-4540-9c38-55b671525931\" (UID: \"6692bdb0-7f7c-4540-9c38-55b671525931\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.226517 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6692bdb0-7f7c-4540-9c38-55b671525931-kube-api-access-txvjp" (OuterVolumeSpecName: "kube-api-access-txvjp") pod "6692bdb0-7f7c-4540-9c38-55b671525931" (UID: "6692bdb0-7f7c-4540-9c38-55b671525931"). InnerVolumeSpecName "kube-api-access-txvjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.228092 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6692bdb0-7f7c-4540-9c38-55b671525931" (UID: "6692bdb0-7f7c-4540-9c38-55b671525931"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.245559 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-scripts" (OuterVolumeSpecName: "scripts") pod "6692bdb0-7f7c-4540-9c38-55b671525931" (UID: "6692bdb0-7f7c-4540-9c38-55b671525931"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.246922 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6692bdb0-7f7c-4540-9c38-55b671525931" (UID: "6692bdb0-7f7c-4540-9c38-55b671525931"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.250930 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-config-data" (OuterVolumeSpecName: "config-data") pod "6692bdb0-7f7c-4540-9c38-55b671525931" (UID: "6692bdb0-7f7c-4540-9c38-55b671525931"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.285543 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6692bdb0-7f7c-4540-9c38-55b671525931" (UID: "6692bdb0-7f7c-4540-9c38-55b671525931"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.324225 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.324257 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.324268 4922 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.324279 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.324289 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txvjp\" (UniqueName: \"kubernetes.io/projected/6692bdb0-7f7c-4540-9c38-55b671525931-kube-api-access-txvjp\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.324297 4922 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6692bdb0-7f7c-4540-9c38-55b671525931-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.590983 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-6qjb4" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.733553 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-config-data\") pod \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.734562 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-combined-ca-bundle\") pod \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.734634 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4btmw\" (UniqueName: \"kubernetes.io/projected/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-kube-api-access-4btmw\") pod \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.735053 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-db-sync-config-data\") pod \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\" (UID: \"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf\") " Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.738953 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-kube-api-access-4btmw" (OuterVolumeSpecName: "kube-api-access-4btmw") pod "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" (UID: "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf"). InnerVolumeSpecName "kube-api-access-4btmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.739756 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" (UID: "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.782367 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" (UID: "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.837361 4922 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.837409 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.837419 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4btmw\" (UniqueName: \"kubernetes.io/projected/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-kube-api-access-4btmw\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.840133 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-config-data" (OuterVolumeSpecName: "config-data") pod "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" (UID: "2dc887c5-8fe3-46a0-af31-64c0b95dfcbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.881243 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:23 crc kubenswrapper[4922]: I0929 22:46:23.938751 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.039979 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-nb\") pod \"cbe053b9-bafb-4a8b-b495-f45b790f531f\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.040048 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-dns-svc\") pod \"cbe053b9-bafb-4a8b-b495-f45b790f531f\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.040114 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-sb\") pod \"cbe053b9-bafb-4a8b-b495-f45b790f531f\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.040226 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmgd4\" (UniqueName: \"kubernetes.io/projected/cbe053b9-bafb-4a8b-b495-f45b790f531f-kube-api-access-lmgd4\") pod \"cbe053b9-bafb-4a8b-b495-f45b790f531f\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.040299 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-config\") pod \"cbe053b9-bafb-4a8b-b495-f45b790f531f\" (UID: \"cbe053b9-bafb-4a8b-b495-f45b790f531f\") " Sep 29 22:46:24 crc kubenswrapper[4922]: 
I0929 22:46:24.052617 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbe053b9-bafb-4a8b-b495-f45b790f531f-kube-api-access-lmgd4" (OuterVolumeSpecName: "kube-api-access-lmgd4") pod "cbe053b9-bafb-4a8b-b495-f45b790f531f" (UID: "cbe053b9-bafb-4a8b-b495-f45b790f531f"). InnerVolumeSpecName "kube-api-access-lmgd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.064461 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zvnng" event={"ID":"45cdffc3-9762-433a-b1d8-7e6af001c0ec","Type":"ContainerStarted","Data":"df2e60aab7e3d8ae2a09de0155bb6512d8a5fcc3b4d52f7a29d46d18974c062f"} Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.076494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerStarted","Data":"6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc"} Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.085656 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" event={"ID":"cbe053b9-bafb-4a8b-b495-f45b790f531f","Type":"ContainerDied","Data":"c9a282ea3598309095430e6344961bda50af2667e739a10a0556853554620a4c"} Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.085692 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6ffb94d8ff-2d7z8" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.085908 4922 scope.go:117] "RemoveContainer" containerID="5f262e5f7ffe6974ffe4a43d0357a32dd3208a0c8723f7593d68b86100be39b6" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.087712 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bzj7x" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.091524 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6qjb4" event={"ID":"2dc887c5-8fe3-46a0-af31-64c0b95dfcbf","Type":"ContainerDied","Data":"7cff0f84b5b6049d342df02be257a9f0688c851c22880e0d574daa06594f0eb6"} Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.092083 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cff0f84b5b6049d342df02be257a9f0688c851c22880e0d574daa06594f0eb6" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.092160 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6qjb4" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.100599 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-config" (OuterVolumeSpecName: "config") pod "cbe053b9-bafb-4a8b-b495-f45b790f531f" (UID: "cbe053b9-bafb-4a8b-b495-f45b790f531f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.103864 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-zvnng" podStartSLOduration=4.716293706 podStartE2EDuration="12.103844697s" podCreationTimestamp="2025-09-29 22:46:12 +0000 UTC" firstStartedPulling="2025-09-29 22:46:16.228228167 +0000 UTC m=+1180.538516980" lastFinishedPulling="2025-09-29 22:46:23.615779158 +0000 UTC m=+1187.926067971" observedRunningTime="2025-09-29 22:46:24.080019754 +0000 UTC m=+1188.390308557" watchObservedRunningTime="2025-09-29 22:46:24.103844697 +0000 UTC m=+1188.414133520" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.104239 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cbe053b9-bafb-4a8b-b495-f45b790f531f" (UID: "cbe053b9-bafb-4a8b-b495-f45b790f531f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.113256 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cbe053b9-bafb-4a8b-b495-f45b790f531f" (UID: "cbe053b9-bafb-4a8b-b495-f45b790f531f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.120712 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-prv7k"] Sep 29 22:46:24 crc kubenswrapper[4922]: W0929 22:46:24.129505 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7a17d6c_65b4_4006_8d06_9942b01955d1.slice/crio-5746a82a2853a7233df6ebe66c59ba55422e27157c6e3aecac57f0b7b2150098 WatchSource:0}: Error finding container 5746a82a2853a7233df6ebe66c59ba55422e27157c6e3aecac57f0b7b2150098: Status 404 returned error can't find the container with id 5746a82a2853a7233df6ebe66c59ba55422e27157c6e3aecac57f0b7b2150098 Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.140607 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cbe053b9-bafb-4a8b-b495-f45b790f531f" (UID: "cbe053b9-bafb-4a8b-b495-f45b790f531f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.142104 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmgd4\" (UniqueName: \"kubernetes.io/projected/cbe053b9-bafb-4a8b-b495-f45b790f531f-kube-api-access-lmgd4\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.142135 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.142153 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.142164 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.142176 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cbe053b9-bafb-4a8b-b495-f45b790f531f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:24 crc kubenswrapper[4922]: W0929 22:46:24.197524 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43650a1d_3702_40e1_b4ef_2cc2f2343c28.slice/crio-c0233aed02eb9eaf3463aae814d16dd6841a797e296be52cbb760cae003ccfd3 WatchSource:0}: Error finding container c0233aed02eb9eaf3463aae814d16dd6841a797e296be52cbb760cae003ccfd3: Status 404 returned error can't find the container with id c0233aed02eb9eaf3463aae814d16dd6841a797e296be52cbb760cae003ccfd3 Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.197859 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-b7zw5"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.206752 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-45t4c"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.213199 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-782mg"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.224125 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-bzj7x"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.232054 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-bzj7x"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.265543 4922 scope.go:117] "RemoveContainer" containerID="7d6d759cc5fdfc5469163b3c5e8412fe29a3b506b3981463051c1f20a3c7526d" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.342726 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-59s2p"] Sep 29 22:46:24 crc kubenswrapper[4922]: E0929 22:46:24.343120 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" containerName="glance-db-sync" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343136 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" containerName="glance-db-sync" Sep 29 22:46:24 crc kubenswrapper[4922]: E0929 22:46:24.343149 4922 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="6692bdb0-7f7c-4540-9c38-55b671525931" containerName="keystone-bootstrap" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343155 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6692bdb0-7f7c-4540-9c38-55b671525931" containerName="keystone-bootstrap" Sep 29 22:46:24 crc kubenswrapper[4922]: E0929 22:46:24.343169 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerName="init" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343176 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerName="init" Sep 29 22:46:24 crc kubenswrapper[4922]: E0929 22:46:24.343195 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerName="dnsmasq-dns" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343201 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerName="dnsmasq-dns" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343410 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6692bdb0-7f7c-4540-9c38-55b671525931" containerName="keystone-bootstrap" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343430 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbe053b9-bafb-4a8b-b495-f45b790f531f" containerName="dnsmasq-dns" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343452 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" containerName="glance-db-sync" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.343992 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.346568 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.346705 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.346738 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.350899 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vvbbl" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.354397 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-59s2p"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.457080 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-credential-keys\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.457129 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g459z\" (UniqueName: \"kubernetes.io/projected/441161de-0f1a-47e8-8adb-5d6a74989cb4-kube-api-access-g459z\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.457193 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-fernet-keys\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.457222 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-config-data\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.457257 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-combined-ca-bundle\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.457285 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-scripts\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.459812 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6692bdb0-7f7c-4540-9c38-55b671525931" path="/var/lib/kubelet/pods/6692bdb0-7f7c-4540-9c38-55b671525931/volumes" Sep 29 22:46:24 crc 
kubenswrapper[4922]: I0929 22:46:24.460467 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-45t4c"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.460497 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-2d7z8"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.472829 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6ffb94d8ff-2d7z8"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.486919 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-pclvw"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.489530 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.496566 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-pclvw"] Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.559671 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-fernet-keys\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.559800 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-config-data\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.559895 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-combined-ca-bundle\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.559973 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-scripts\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.560057 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-credential-keys\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.560105 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g459z\" (UniqueName: \"kubernetes.io/projected/441161de-0f1a-47e8-8adb-5d6a74989cb4-kube-api-access-g459z\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.564122 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-config-data\") pod \"keystone-bootstrap-59s2p\" (UID: 
\"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.566343 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-combined-ca-bundle\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.566935 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-scripts\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.567022 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-credential-keys\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.567840 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-fernet-keys\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.596888 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g459z\" (UniqueName: \"kubernetes.io/projected/441161de-0f1a-47e8-8adb-5d6a74989cb4-kube-api-access-g459z\") pod \"keystone-bootstrap-59s2p\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.667518 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.667852 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.667911 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdpbf\" (UniqueName: \"kubernetes.io/projected/41540d9c-6693-4f54-b2c4-827a507918b1-kube-api-access-jdpbf\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.667933 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-config\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " 
pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.667990 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.668009 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.701744 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.769250 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdpbf\" (UniqueName: \"kubernetes.io/projected/41540d9c-6693-4f54-b2c4-827a507918b1-kube-api-access-jdpbf\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.769324 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-config\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.769414 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.769433 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.769508 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.769528 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.770421 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.771217 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-config\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.772369 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.773044 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.773524 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.787011 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdpbf\" (UniqueName: \"kubernetes.io/projected/41540d9c-6693-4f54-b2c4-827a507918b1-kube-api-access-jdpbf\") pod \"dnsmasq-dns-57c957c4ff-pclvw\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.816269 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:24 crc kubenswrapper[4922]: E0929 22:46:24.836293 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod830f2021_190b_481c_b306_3fd4e588eb9f.slice/crio-conmon-9e2019fefd4db1c265fc3f57061750350850e60bc61dc955361ef86e7d83cf02.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod830f2021_190b_481c_b306_3fd4e588eb9f.slice/crio-9e2019fefd4db1c265fc3f57061750350850e60bc61dc955361ef86e7d83cf02.scope\": RecentStats: unable to find data in memory cache]" Sep 29 22:46:24 crc kubenswrapper[4922]: I0929 22:46:24.967607 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-59s2p"] Sep 29 22:46:24 crc kubenswrapper[4922]: W0929 22:46:24.990543 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod441161de_0f1a_47e8_8adb_5d6a74989cb4.slice/crio-224a03fbac4ea68a31d3b4db71deeed7a2cd34a91eb2d4cb1362f32d60b87ef9 WatchSource:0}: Error finding container 224a03fbac4ea68a31d3b4db71deeed7a2cd34a91eb2d4cb1362f32d60b87ef9: Status 404 returned error can't find the container with id 224a03fbac4ea68a31d3b4db71deeed7a2cd34a91eb2d4cb1362f32d60b87ef9 Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.104513 4922 generic.go:334] "Generic (PLEG): container finished" podID="830f2021-190b-481c-b306-3fd4e588eb9f" containerID="9e2019fefd4db1c265fc3f57061750350850e60bc61dc955361ef86e7d83cf02" exitCode=0 Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.104697 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" event={"ID":"830f2021-190b-481c-b306-3fd4e588eb9f","Type":"ContainerDied","Data":"9e2019fefd4db1c265fc3f57061750350850e60bc61dc955361ef86e7d83cf02"} Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.104813 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" event={"ID":"830f2021-190b-481c-b306-3fd4e588eb9f","Type":"ContainerStarted","Data":"4da868033f8711fe9a41c06b782e280e807f28e8e40285b9f6ee25624018086d"} Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.106640 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-59s2p" event={"ID":"441161de-0f1a-47e8-8adb-5d6a74989cb4","Type":"ContainerStarted","Data":"224a03fbac4ea68a31d3b4db71deeed7a2cd34a91eb2d4cb1362f32d60b87ef9"} Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.129279 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-b7zw5" event={"ID":"43fe2c56-fd4b-4e01-9550-49d15df8264f","Type":"ContainerStarted","Data":"b137252b3fcf80b102a7512521912ab1e7489cb1db512f4c41d4510733941949"} Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.129338 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-b7zw5" event={"ID":"43fe2c56-fd4b-4e01-9550-49d15df8264f","Type":"ContainerStarted","Data":"1226b62c31d57bac603b763cf7f487331a9041277c99f973c0e6f7a2dfb0edb0"} Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.137869 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-prv7k" event={"ID":"f7a17d6c-65b4-4006-8d06-9942b01955d1","Type":"ContainerStarted","Data":"5746a82a2853a7233df6ebe66c59ba55422e27157c6e3aecac57f0b7b2150098"} Sep 
29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.144376 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-782mg" event={"ID":"43650a1d-3702-40e1-b4ef-2cc2f2343c28","Type":"ContainerStarted","Data":"c0233aed02eb9eaf3463aae814d16dd6841a797e296be52cbb760cae003ccfd3"} Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.161076 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-b7zw5" podStartSLOduration=6.161054502 podStartE2EDuration="6.161054502s" podCreationTimestamp="2025-09-29 22:46:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:25.152195465 +0000 UTC m=+1189.462484278" watchObservedRunningTime="2025-09-29 22:46:25.161054502 +0000 UTC m=+1189.471343315" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.366587 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-pclvw"] Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.447709 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.467080 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.469240 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.469450 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-2j6pm" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.469682 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.485696 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.532351 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.538841 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.541932 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.550671 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.553923 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.584532 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-scripts\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.584575 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.584619 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-logs\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.584676 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-config-data\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.584705 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.584755 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.584800 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5pdh\" (UniqueName: \"kubernetes.io/projected/80abe265-931c-437c-a82e-768ff29f853a-kube-api-access-l5pdh\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.685772 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-svc\") pod \"830f2021-190b-481c-b306-3fd4e588eb9f\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.685830 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-sb\") pod \"830f2021-190b-481c-b306-3fd4e588eb9f\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.685881 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-swift-storage-0\") pod \"830f2021-190b-481c-b306-3fd4e588eb9f\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.685906 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-nb\") pod \"830f2021-190b-481c-b306-3fd4e588eb9f\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.685963 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-config\") pod \"830f2021-190b-481c-b306-3fd4e588eb9f\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686029 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgbvj\" (UniqueName: \"kubernetes.io/projected/830f2021-190b-481c-b306-3fd4e588eb9f-kube-api-access-xgbvj\") pod \"830f2021-190b-481c-b306-3fd4e588eb9f\" (UID: \"830f2021-190b-481c-b306-3fd4e588eb9f\") " Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686291 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk4h6\" (UniqueName: \"kubernetes.io/projected/909788bb-9053-4ed7-85d8-1c36ea96066c-kube-api-access-nk4h6\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686321 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686347 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-config-data\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686379 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-logs\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686409 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " 
pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686433 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686469 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686493 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686517 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5pdh\" (UniqueName: \"kubernetes.io/projected/80abe265-931c-437c-a82e-768ff29f853a-kube-api-access-l5pdh\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686534 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686581 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-scripts\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686602 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686617 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.686651 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-logs\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " 
pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.687013 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-logs\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.687546 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.688712 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.696011 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-scripts\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.700732 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-config-data\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.701180 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.706838 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/830f2021-190b-481c-b306-3fd4e588eb9f-kube-api-access-xgbvj" (OuterVolumeSpecName: "kube-api-access-xgbvj") pod "830f2021-190b-481c-b306-3fd4e588eb9f" (UID: "830f2021-190b-481c-b306-3fd4e588eb9f"). InnerVolumeSpecName "kube-api-access-xgbvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.709570 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5pdh\" (UniqueName: \"kubernetes.io/projected/80abe265-931c-437c-a82e-768ff29f853a-kube-api-access-l5pdh\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.740429 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "830f2021-190b-481c-b306-3fd4e588eb9f" (UID: "830f2021-190b-481c-b306-3fd4e588eb9f"). 
InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.755201 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.766458 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "830f2021-190b-481c-b306-3fd4e588eb9f" (UID: "830f2021-190b-481c-b306-3fd4e588eb9f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.776252 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-config" (OuterVolumeSpecName: "config") pod "830f2021-190b-481c-b306-3fd4e588eb9f" (UID: "830f2021-190b-481c-b306-3fd4e588eb9f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.781383 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "830f2021-190b-481c-b306-3fd4e588eb9f" (UID: "830f2021-190b-481c-b306-3fd4e588eb9f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.782832 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "830f2021-190b-481c-b306-3fd4e588eb9f" (UID: "830f2021-190b-481c-b306-3fd4e588eb9f"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.788078 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk4h6\" (UniqueName: \"kubernetes.io/projected/909788bb-9053-4ed7-85d8-1c36ea96066c-kube-api-access-nk4h6\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.788124 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.788169 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-logs\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.788195 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.788480 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.789670 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-logs\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.789745 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.789811 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.789983 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 
22:46:25.790135 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.790148 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.790159 4922 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.790169 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.790179 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/830f2021-190b-481c-b306-3fd4e588eb9f-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.790189 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgbvj\" (UniqueName: \"kubernetes.io/projected/830f2021-190b-481c-b306-3fd4e588eb9f-kube-api-access-xgbvj\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.790747 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.794237 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.797000 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.801286 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.812476 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk4h6\" (UniqueName: \"kubernetes.io/projected/909788bb-9053-4ed7-85d8-1c36ea96066c-kube-api-access-nk4h6\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.826110 4922 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.828755 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:25 crc kubenswrapper[4922]: I0929 22:46:25.871891 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.161514 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" event={"ID":"830f2021-190b-481c-b306-3fd4e588eb9f","Type":"ContainerDied","Data":"4da868033f8711fe9a41c06b782e280e807f28e8e40285b9f6ee25624018086d"} Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.162912 4922 scope.go:117] "RemoveContainer" containerID="9e2019fefd4db1c265fc3f57061750350850e60bc61dc955361ef86e7d83cf02" Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.161679 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-45t4c" Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.164272 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-59s2p" event={"ID":"441161de-0f1a-47e8-8adb-5d6a74989cb4","Type":"ContainerStarted","Data":"7c2326df9a7a292c4ca8fe772bd0618a690c2b040399447a26c66ca42613a28c"} Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.172523 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" event={"ID":"41540d9c-6693-4f54-b2c4-827a507918b1","Type":"ContainerStarted","Data":"fc1ca8d792a73619a0707b10207cee9423fb4323322f46cbd10cdd8c08e7e4a0"} Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.192667 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-59s2p" podStartSLOduration=2.192649329 podStartE2EDuration="2.192649329s" podCreationTimestamp="2025-09-29 22:46:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:26.185465263 +0000 UTC m=+1190.495754086" watchObservedRunningTime="2025-09-29 22:46:26.192649329 +0000 UTC m=+1190.502938142" Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.281499 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-45t4c"] Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.290922 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-45t4c"] Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.437548 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="830f2021-190b-481c-b306-3fd4e588eb9f" path="/var/lib/kubelet/pods/830f2021-190b-481c-b306-3fd4e588eb9f/volumes" Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.438140 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbe053b9-bafb-4a8b-b495-f45b790f531f" path="/var/lib/kubelet/pods/cbe053b9-bafb-4a8b-b495-f45b790f531f/volumes" Sep 29 22:46:26 crc kubenswrapper[4922]: I0929 22:46:26.993925 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:27 crc kubenswrapper[4922]: I0929 22:46:27.203582 
4922 generic.go:334] "Generic (PLEG): container finished" podID="41540d9c-6693-4f54-b2c4-827a507918b1" containerID="bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc" exitCode=0 Sep 29 22:46:27 crc kubenswrapper[4922]: I0929 22:46:27.204720 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" event={"ID":"41540d9c-6693-4f54-b2c4-827a507918b1","Type":"ContainerDied","Data":"bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc"} Sep 29 22:46:27 crc kubenswrapper[4922]: W0929 22:46:27.218674 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod909788bb_9053_4ed7_85d8_1c36ea96066c.slice/crio-e25888681039c2866b46d92b03872bd1121ef48fe292204590389a7009c2b3b8 WatchSource:0}: Error finding container e25888681039c2866b46d92b03872bd1121ef48fe292204590389a7009c2b3b8: Status 404 returned error can't find the container with id e25888681039c2866b46d92b03872bd1121ef48fe292204590389a7009c2b3b8 Sep 29 22:46:27 crc kubenswrapper[4922]: I0929 22:46:27.580255 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:27 crc kubenswrapper[4922]: I0929 22:46:27.629892 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:27 crc kubenswrapper[4922]: I0929 22:46:27.907340 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.235051 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"909788bb-9053-4ed7-85d8-1c36ea96066c","Type":"ContainerStarted","Data":"8bcf0c93a244256c64ddedaff63fce16213a016c93e245bdde1d341d23f26a0d"} Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.235098 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"909788bb-9053-4ed7-85d8-1c36ea96066c","Type":"ContainerStarted","Data":"e25888681039c2866b46d92b03872bd1121ef48fe292204590389a7009c2b3b8"} Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.236798 4922 generic.go:334] "Generic (PLEG): container finished" podID="45cdffc3-9762-433a-b1d8-7e6af001c0ec" containerID="df2e60aab7e3d8ae2a09de0155bb6512d8a5fcc3b4d52f7a29d46d18974c062f" exitCode=0 Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.236915 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zvnng" event={"ID":"45cdffc3-9762-433a-b1d8-7e6af001c0ec","Type":"ContainerDied","Data":"df2e60aab7e3d8ae2a09de0155bb6512d8a5fcc3b4d52f7a29d46d18974c062f"} Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.238887 4922 generic.go:334] "Generic (PLEG): container finished" podID="441161de-0f1a-47e8-8adb-5d6a74989cb4" containerID="7c2326df9a7a292c4ca8fe772bd0618a690c2b040399447a26c66ca42613a28c" exitCode=0 Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.238936 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-59s2p" event={"ID":"441161de-0f1a-47e8-8adb-5d6a74989cb4","Type":"ContainerDied","Data":"7c2326df9a7a292c4ca8fe772bd0618a690c2b040399447a26c66ca42613a28c"} Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.913043 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:46:28 crc kubenswrapper[4922]: I0929 22:46:28.913100 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.268223 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-59s2p" event={"ID":"441161de-0f1a-47e8-8adb-5d6a74989cb4","Type":"ContainerDied","Data":"224a03fbac4ea68a31d3b4db71deeed7a2cd34a91eb2d4cb1362f32d60b87ef9"} Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.268617 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="224a03fbac4ea68a31d3b4db71deeed7a2cd34a91eb2d4cb1362f32d60b87ef9" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.270458 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zvnng" event={"ID":"45cdffc3-9762-433a-b1d8-7e6af001c0ec","Type":"ContainerDied","Data":"f3c84e701be060d12ae2ff8b19fb14244a6028a5f870beb02563614f47f22df7"} Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.270497 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3c84e701be060d12ae2ff8b19fb14244a6028a5f870beb02563614f47f22df7" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.271696 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80abe265-931c-437c-a82e-768ff29f853a","Type":"ContainerStarted","Data":"cfcd2d6968e223ab931ff80468a8feccd2ec594292ee30ea5472c384d011b2b8"} Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.302237 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.344873 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.382645 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45cdffc3-9762-433a-b1d8-7e6af001c0ec-logs\") pod \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.382715 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtw5p\" (UniqueName: \"kubernetes.io/projected/45cdffc3-9762-433a-b1d8-7e6af001c0ec-kube-api-access-vtw5p\") pod \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.382816 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-config-data\") pod \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.382852 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-scripts\") pod \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.382930 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-combined-ca-bundle\") pod \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\" (UID: \"45cdffc3-9762-433a-b1d8-7e6af001c0ec\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.384755 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45cdffc3-9762-433a-b1d8-7e6af001c0ec-logs" (OuterVolumeSpecName: "logs") pod "45cdffc3-9762-433a-b1d8-7e6af001c0ec" (UID: "45cdffc3-9762-433a-b1d8-7e6af001c0ec"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.388377 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-scripts" (OuterVolumeSpecName: "scripts") pod "45cdffc3-9762-433a-b1d8-7e6af001c0ec" (UID: "45cdffc3-9762-433a-b1d8-7e6af001c0ec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.392102 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45cdffc3-9762-433a-b1d8-7e6af001c0ec-kube-api-access-vtw5p" (OuterVolumeSpecName: "kube-api-access-vtw5p") pod "45cdffc3-9762-433a-b1d8-7e6af001c0ec" (UID: "45cdffc3-9762-433a-b1d8-7e6af001c0ec"). InnerVolumeSpecName "kube-api-access-vtw5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.409529 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-config-data" (OuterVolumeSpecName: "config-data") pod "45cdffc3-9762-433a-b1d8-7e6af001c0ec" (UID: "45cdffc3-9762-433a-b1d8-7e6af001c0ec"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.433698 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45cdffc3-9762-433a-b1d8-7e6af001c0ec" (UID: "45cdffc3-9762-433a-b1d8-7e6af001c0ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.484326 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-config-data\") pod \"441161de-0f1a-47e8-8adb-5d6a74989cb4\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.484417 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-fernet-keys\") pod \"441161de-0f1a-47e8-8adb-5d6a74989cb4\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.484446 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g459z\" (UniqueName: \"kubernetes.io/projected/441161de-0f1a-47e8-8adb-5d6a74989cb4-kube-api-access-g459z\") pod \"441161de-0f1a-47e8-8adb-5d6a74989cb4\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.484579 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-combined-ca-bundle\") pod \"441161de-0f1a-47e8-8adb-5d6a74989cb4\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.484625 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-scripts\") pod \"441161de-0f1a-47e8-8adb-5d6a74989cb4\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.484738 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-credential-keys\") pod \"441161de-0f1a-47e8-8adb-5d6a74989cb4\" (UID: \"441161de-0f1a-47e8-8adb-5d6a74989cb4\") " Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.485246 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.485264 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/45cdffc3-9762-433a-b1d8-7e6af001c0ec-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.485273 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtw5p\" (UniqueName: \"kubernetes.io/projected/45cdffc3-9762-433a-b1d8-7e6af001c0ec-kube-api-access-vtw5p\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.485283 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.485291 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45cdffc3-9762-433a-b1d8-7e6af001c0ec-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.487991 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "441161de-0f1a-47e8-8adb-5d6a74989cb4" (UID: "441161de-0f1a-47e8-8adb-5d6a74989cb4"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.488507 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "441161de-0f1a-47e8-8adb-5d6a74989cb4" (UID: "441161de-0f1a-47e8-8adb-5d6a74989cb4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.489602 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-scripts" (OuterVolumeSpecName: "scripts") pod "441161de-0f1a-47e8-8adb-5d6a74989cb4" (UID: "441161de-0f1a-47e8-8adb-5d6a74989cb4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.489904 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/441161de-0f1a-47e8-8adb-5d6a74989cb4-kube-api-access-g459z" (OuterVolumeSpecName: "kube-api-access-g459z") pod "441161de-0f1a-47e8-8adb-5d6a74989cb4" (UID: "441161de-0f1a-47e8-8adb-5d6a74989cb4"). InnerVolumeSpecName "kube-api-access-g459z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.509084 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "441161de-0f1a-47e8-8adb-5d6a74989cb4" (UID: "441161de-0f1a-47e8-8adb-5d6a74989cb4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.520586 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-config-data" (OuterVolumeSpecName: "config-data") pod "441161de-0f1a-47e8-8adb-5d6a74989cb4" (UID: "441161de-0f1a-47e8-8adb-5d6a74989cb4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.586376 4922 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.586431 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.586442 4922 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.586455 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g459z\" (UniqueName: \"kubernetes.io/projected/441161de-0f1a-47e8-8adb-5d6a74989cb4-kube-api-access-g459z\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.586469 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:30 crc kubenswrapper[4922]: I0929 22:46:30.586479 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/441161de-0f1a-47e8-8adb-5d6a74989cb4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.283699 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80abe265-931c-437c-a82e-768ff29f853a","Type":"ContainerStarted","Data":"7e39e24258fe808639c97435d4ac9f91b722f961d68853b5ee3288a90da7acf3"} Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.285806 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" event={"ID":"41540d9c-6693-4f54-b2c4-827a507918b1","Type":"ContainerStarted","Data":"2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141"} Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.286647 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.294486 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-prv7k" event={"ID":"f7a17d6c-65b4-4006-8d06-9942b01955d1","Type":"ContainerStarted","Data":"f53c7b3b9f62eb3961e0c29dd8e4620868ae009185345b663011a716c95d6ad7"} Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.297810 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"909788bb-9053-4ed7-85d8-1c36ea96066c","Type":"ContainerStarted","Data":"86fdc90b39b1b811f3dc738d107bbe0b5c017f5d91f898435027426b66e77afc"} Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.297968 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-log" containerID="cri-o://8bcf0c93a244256c64ddedaff63fce16213a016c93e245bdde1d341d23f26a0d" gracePeriod=30 Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.297987 4922 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack/glance-default-internal-api-0" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-httpd" containerID="cri-o://86fdc90b39b1b811f3dc738d107bbe0b5c017f5d91f898435027426b66e77afc" gracePeriod=30 Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.300584 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-59s2p" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.310326 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerStarted","Data":"b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8"} Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.310410 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zvnng" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.315893 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" podStartSLOduration=7.31583422 podStartE2EDuration="7.31583422s" podCreationTimestamp="2025-09-29 22:46:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:31.310854858 +0000 UTC m=+1195.621143681" watchObservedRunningTime="2025-09-29 22:46:31.31583422 +0000 UTC m=+1195.626123043" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.331437 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-prv7k" podStartSLOduration=6.318308831 podStartE2EDuration="12.331415012s" podCreationTimestamp="2025-09-29 22:46:19 +0000 UTC" firstStartedPulling="2025-09-29 22:46:24.134532749 +0000 UTC m=+1188.444821572" lastFinishedPulling="2025-09-29 22:46:30.14763893 +0000 UTC m=+1194.457927753" observedRunningTime="2025-09-29 22:46:31.329857584 +0000 UTC m=+1195.640146437" watchObservedRunningTime="2025-09-29 22:46:31.331415012 +0000 UTC m=+1195.641703845" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.364878 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.364859501 podStartE2EDuration="7.364859501s" podCreationTimestamp="2025-09-29 22:46:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:31.358893145 +0000 UTC m=+1195.669181958" watchObservedRunningTime="2025-09-29 22:46:31.364859501 +0000 UTC m=+1195.675148314" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.412631 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-556f68d56-jxmlq"] Sep 29 22:46:31 crc kubenswrapper[4922]: E0929 22:46:31.413285 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="441161de-0f1a-47e8-8adb-5d6a74989cb4" containerName="keystone-bootstrap" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.413425 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="441161de-0f1a-47e8-8adb-5d6a74989cb4" containerName="keystone-bootstrap" Sep 29 22:46:31 crc kubenswrapper[4922]: E0929 22:46:31.413525 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830f2021-190b-481c-b306-3fd4e588eb9f" containerName="init" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.413610 4922 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="830f2021-190b-481c-b306-3fd4e588eb9f" containerName="init" Sep 29 22:46:31 crc kubenswrapper[4922]: E0929 22:46:31.413708 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45cdffc3-9762-433a-b1d8-7e6af001c0ec" containerName="placement-db-sync" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.413784 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="45cdffc3-9762-433a-b1d8-7e6af001c0ec" containerName="placement-db-sync" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.414097 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="830f2021-190b-481c-b306-3fd4e588eb9f" containerName="init" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.414193 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="45cdffc3-9762-433a-b1d8-7e6af001c0ec" containerName="placement-db-sync" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.414312 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="441161de-0f1a-47e8-8adb-5d6a74989cb4" containerName="keystone-bootstrap" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.415559 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.419025 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.419334 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.419754 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-vv2bx" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.420575 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.420733 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.435154 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-556f68d56-jxmlq"] Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.512557 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-66cbdc5bdb-sfmk4"] Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.513510 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.520595 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-config-data\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.520646 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-combined-ca-bundle\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.520681 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-public-tls-certs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.520734 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-internal-tls-certs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.520778 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51f81c86-8f6d-4506-a940-5015032df5bd-logs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.520887 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-scripts\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.521000 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr2t5\" (UniqueName: \"kubernetes.io/projected/51f81c86-8f6d-4506-a940-5015032df5bd-kube-api-access-wr2t5\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.523372 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.524415 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.524511 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-vvbbl" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.524625 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 22:46:31 crc 
kubenswrapper[4922]: I0929 22:46:31.524750 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.524879 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.572277 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-66cbdc5bdb-sfmk4"] Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622106 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr2t5\" (UniqueName: \"kubernetes.io/projected/51f81c86-8f6d-4506-a940-5015032df5bd-kube-api-access-wr2t5\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622149 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-combined-ca-bundle\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622209 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-credential-keys\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622231 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-fernet-keys\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622253 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-config-data\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622278 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-combined-ca-bundle\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622296 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-public-tls-certs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622320 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-internal-tls-certs\") pod \"placement-556f68d56-jxmlq\" (UID: 
\"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622342 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51f81c86-8f6d-4506-a940-5015032df5bd-logs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622370 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pth5\" (UniqueName: \"kubernetes.io/projected/19fc05a2-d210-4c05-8341-eafdbcc40dc1-kube-api-access-6pth5\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622411 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-scripts\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622430 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-public-tls-certs\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622444 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-config-data\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622472 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-internal-tls-certs\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.622492 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-scripts\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.638827 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51f81c86-8f6d-4506-a940-5015032df5bd-logs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.640241 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-public-tls-certs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 
29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.640293 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-config-data\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.640916 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-combined-ca-bundle\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.640950 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-internal-tls-certs\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.642239 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr2t5\" (UniqueName: \"kubernetes.io/projected/51f81c86-8f6d-4506-a940-5015032df5bd-kube-api-access-wr2t5\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.649464 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-scripts\") pod \"placement-556f68d56-jxmlq\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.723841 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pth5\" (UniqueName: \"kubernetes.io/projected/19fc05a2-d210-4c05-8341-eafdbcc40dc1-kube-api-access-6pth5\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.723901 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-public-tls-certs\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.723917 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-config-data\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.723948 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-internal-tls-certs\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.723970 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-scripts\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.723988 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-combined-ca-bundle\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.724038 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-credential-keys\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.724054 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-fernet-keys\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.731346 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-config-data\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.732036 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-combined-ca-bundle\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.732242 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-scripts\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.732429 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-internal-tls-certs\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.732688 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-credential-keys\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.733049 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-public-tls-certs\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: 
\"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.733137 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-fernet-keys\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.741293 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pth5\" (UniqueName: \"kubernetes.io/projected/19fc05a2-d210-4c05-8341-eafdbcc40dc1-kube-api-access-6pth5\") pod \"keystone-66cbdc5bdb-sfmk4\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.794004 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:31 crc kubenswrapper[4922]: I0929 22:46:31.851928 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.241159 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-556f68d56-jxmlq"] Sep 29 22:46:32 crc kubenswrapper[4922]: W0929 22:46:32.260310 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51f81c86_8f6d_4506_a940_5015032df5bd.slice/crio-720c3aa536538d9da9cbed2c46c678d8960341138b7cb603422bb996d821a8ba WatchSource:0}: Error finding container 720c3aa536538d9da9cbed2c46c678d8960341138b7cb603422bb996d821a8ba: Status 404 returned error can't find the container with id 720c3aa536538d9da9cbed2c46c678d8960341138b7cb603422bb996d821a8ba Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.313829 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80abe265-931c-437c-a82e-768ff29f853a","Type":"ContainerStarted","Data":"df4dd4771a5699533dfaac86cfd7ab9d4cc4a35d468c739942bd0d86a26634a9"} Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.315685 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-556f68d56-jxmlq" event={"ID":"51f81c86-8f6d-4506-a940-5015032df5bd","Type":"ContainerStarted","Data":"720c3aa536538d9da9cbed2c46c678d8960341138b7cb603422bb996d821a8ba"} Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.318020 4922 generic.go:334] "Generic (PLEG): container finished" podID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerID="86fdc90b39b1b811f3dc738d107bbe0b5c017f5d91f898435027426b66e77afc" exitCode=0 Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.318118 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"909788bb-9053-4ed7-85d8-1c36ea96066c","Type":"ContainerDied","Data":"86fdc90b39b1b811f3dc738d107bbe0b5c017f5d91f898435027426b66e77afc"} Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.318206 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"909788bb-9053-4ed7-85d8-1c36ea96066c","Type":"ContainerDied","Data":"8bcf0c93a244256c64ddedaff63fce16213a016c93e245bdde1d341d23f26a0d"} Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.318174 4922 generic.go:334] "Generic (PLEG): container finished" 
podID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerID="8bcf0c93a244256c64ddedaff63fce16213a016c93e245bdde1d341d23f26a0d" exitCode=143 Sep 29 22:46:32 crc kubenswrapper[4922]: I0929 22:46:32.364402 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-66cbdc5bdb-sfmk4"] Sep 29 22:46:32 crc kubenswrapper[4922]: W0929 22:46:32.375580 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19fc05a2_d210_4c05_8341_eafdbcc40dc1.slice/crio-fa6b9b72afd5c458d18550633466c69ce65cc3f62fdb3620dbc165163cd37949 WatchSource:0}: Error finding container fa6b9b72afd5c458d18550633466c69ce65cc3f62fdb3620dbc165163cd37949: Status 404 returned error can't find the container with id fa6b9b72afd5c458d18550633466c69ce65cc3f62fdb3620dbc165163cd37949 Sep 29 22:46:33 crc kubenswrapper[4922]: I0929 22:46:33.331160 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-556f68d56-jxmlq" event={"ID":"51f81c86-8f6d-4506-a940-5015032df5bd","Type":"ContainerStarted","Data":"b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7"} Sep 29 22:46:33 crc kubenswrapper[4922]: I0929 22:46:33.334640 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-66cbdc5bdb-sfmk4" event={"ID":"19fc05a2-d210-4c05-8341-eafdbcc40dc1","Type":"ContainerStarted","Data":"f71b4f675a06896c34fd4e0be64b4fe734c8451e3903876095b9c46d095cd09a"} Sep 29 22:46:33 crc kubenswrapper[4922]: I0929 22:46:33.334664 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-66cbdc5bdb-sfmk4" event={"ID":"19fc05a2-d210-4c05-8341-eafdbcc40dc1","Type":"ContainerStarted","Data":"fa6b9b72afd5c458d18550633466c69ce65cc3f62fdb3620dbc165163cd37949"} Sep 29 22:46:33 crc kubenswrapper[4922]: I0929 22:46:33.334763 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-log" containerID="cri-o://7e39e24258fe808639c97435d4ac9f91b722f961d68853b5ee3288a90da7acf3" gracePeriod=30 Sep 29 22:46:33 crc kubenswrapper[4922]: I0929 22:46:33.335487 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-httpd" containerID="cri-o://df4dd4771a5699533dfaac86cfd7ab9d4cc4a35d468c739942bd0d86a26634a9" gracePeriod=30 Sep 29 22:46:33 crc kubenswrapper[4922]: I0929 22:46:33.366562 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.366512928 podStartE2EDuration="9.366512928s" podCreationTimestamp="2025-09-29 22:46:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:33.352357742 +0000 UTC m=+1197.662646585" watchObservedRunningTime="2025-09-29 22:46:33.366512928 +0000 UTC m=+1197.676801761" Sep 29 22:46:33 crc kubenswrapper[4922]: I0929 22:46:33.373862 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-66cbdc5bdb-sfmk4" podStartSLOduration=2.373817627 podStartE2EDuration="2.373817627s" podCreationTimestamp="2025-09-29 22:46:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:33.37148541 +0000 UTC m=+1197.681774223" 
watchObservedRunningTime="2025-09-29 22:46:33.373817627 +0000 UTC m=+1197.684106450" Sep 29 22:46:34 crc kubenswrapper[4922]: I0929 22:46:34.349045 4922 generic.go:334] "Generic (PLEG): container finished" podID="80abe265-931c-437c-a82e-768ff29f853a" containerID="df4dd4771a5699533dfaac86cfd7ab9d4cc4a35d468c739942bd0d86a26634a9" exitCode=0 Sep 29 22:46:34 crc kubenswrapper[4922]: I0929 22:46:34.349077 4922 generic.go:334] "Generic (PLEG): container finished" podID="80abe265-931c-437c-a82e-768ff29f853a" containerID="7e39e24258fe808639c97435d4ac9f91b722f961d68853b5ee3288a90da7acf3" exitCode=143 Sep 29 22:46:34 crc kubenswrapper[4922]: I0929 22:46:34.349185 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80abe265-931c-437c-a82e-768ff29f853a","Type":"ContainerDied","Data":"df4dd4771a5699533dfaac86cfd7ab9d4cc4a35d468c739942bd0d86a26634a9"} Sep 29 22:46:34 crc kubenswrapper[4922]: I0929 22:46:34.349265 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80abe265-931c-437c-a82e-768ff29f853a","Type":"ContainerDied","Data":"7e39e24258fe808639c97435d4ac9f91b722f961d68853b5ee3288a90da7acf3"} Sep 29 22:46:34 crc kubenswrapper[4922]: I0929 22:46:34.349292 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:46:36 crc kubenswrapper[4922]: I0929 22:46:36.378283 4922 generic.go:334] "Generic (PLEG): container finished" podID="f7a17d6c-65b4-4006-8d06-9942b01955d1" containerID="f53c7b3b9f62eb3961e0c29dd8e4620868ae009185345b663011a716c95d6ad7" exitCode=0 Sep 29 22:46:36 crc kubenswrapper[4922]: I0929 22:46:36.378538 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-prv7k" event={"ID":"f7a17d6c-65b4-4006-8d06-9942b01955d1","Type":"ContainerDied","Data":"f53c7b3b9f62eb3961e0c29dd8e4620868ae009185345b663011a716c95d6ad7"} Sep 29 22:46:39 crc kubenswrapper[4922]: I0929 22:46:39.818545 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:46:39 crc kubenswrapper[4922]: I0929 22:46:39.887937 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-llwhb"] Sep 29 22:46:39 crc kubenswrapper[4922]: I0929 22:46:39.888155 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" podUID="c672b20c-8486-4def-9b55-6907518cb710" containerName="dnsmasq-dns" containerID="cri-o://fd728657c94eab24147d3c2acb47f39b080b44c782f519f650b2d82010f8c22d" gracePeriod=10 Sep 29 22:46:40 crc kubenswrapper[4922]: I0929 22:46:40.415912 4922 generic.go:334] "Generic (PLEG): container finished" podID="c672b20c-8486-4def-9b55-6907518cb710" containerID="fd728657c94eab24147d3c2acb47f39b080b44c782f519f650b2d82010f8c22d" exitCode=0 Sep 29 22:46:40 crc kubenswrapper[4922]: I0929 22:46:40.416058 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" event={"ID":"c672b20c-8486-4def-9b55-6907518cb710","Type":"ContainerDied","Data":"fd728657c94eab24147d3c2acb47f39b080b44c782f519f650b2d82010f8c22d"} Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.163921 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.291355 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-combined-ca-bundle\") pod \"f7a17d6c-65b4-4006-8d06-9942b01955d1\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.291503 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-db-sync-config-data\") pod \"f7a17d6c-65b4-4006-8d06-9942b01955d1\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.291579 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngfgk\" (UniqueName: \"kubernetes.io/projected/f7a17d6c-65b4-4006-8d06-9942b01955d1-kube-api-access-ngfgk\") pod \"f7a17d6c-65b4-4006-8d06-9942b01955d1\" (UID: \"f7a17d6c-65b4-4006-8d06-9942b01955d1\") " Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.307525 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a17d6c-65b4-4006-8d06-9942b01955d1-kube-api-access-ngfgk" (OuterVolumeSpecName: "kube-api-access-ngfgk") pod "f7a17d6c-65b4-4006-8d06-9942b01955d1" (UID: "f7a17d6c-65b4-4006-8d06-9942b01955d1"). InnerVolumeSpecName "kube-api-access-ngfgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.313314 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f7a17d6c-65b4-4006-8d06-9942b01955d1" (UID: "f7a17d6c-65b4-4006-8d06-9942b01955d1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.333867 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7a17d6c-65b4-4006-8d06-9942b01955d1" (UID: "f7a17d6c-65b4-4006-8d06-9942b01955d1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.394873 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.394952 4922 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f7a17d6c-65b4-4006-8d06-9942b01955d1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.394978 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngfgk\" (UniqueName: \"kubernetes.io/projected/f7a17d6c-65b4-4006-8d06-9942b01955d1-kube-api-access-ngfgk\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.426542 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-prv7k" event={"ID":"f7a17d6c-65b4-4006-8d06-9942b01955d1","Type":"ContainerDied","Data":"5746a82a2853a7233df6ebe66c59ba55422e27157c6e3aecac57f0b7b2150098"} Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.426882 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5746a82a2853a7233df6ebe66c59ba55422e27157c6e3aecac57f0b7b2150098" Sep 29 22:46:41 crc kubenswrapper[4922]: I0929 22:46:41.426963 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-prv7k" Sep 29 22:46:42 crc kubenswrapper[4922]: E0929 22:46:42.252190 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 29 22:46:42 crc kubenswrapper[4922]: E0929 22:46:42.252380 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2qrwf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-782mg_openstack(43650a1d-3702-40e1-b4ef-2cc2f2343c28): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:46:42 crc kubenswrapper[4922]: E0929 22:46:42.254483 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-782mg" podUID="43650a1d-3702-40e1-b4ef-2cc2f2343c28" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.339615 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.514125 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.516913 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-logs\") pod \"909788bb-9053-4ed7-85d8-1c36ea96066c\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.516969 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nk4h6\" (UniqueName: \"kubernetes.io/projected/909788bb-9053-4ed7-85d8-1c36ea96066c-kube-api-access-nk4h6\") pod \"909788bb-9053-4ed7-85d8-1c36ea96066c\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.517048 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-scripts\") pod \"909788bb-9053-4ed7-85d8-1c36ea96066c\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.517110 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-httpd-run\") pod \"909788bb-9053-4ed7-85d8-1c36ea96066c\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.517159 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"909788bb-9053-4ed7-85d8-1c36ea96066c\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.517188 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-config-data\") pod \"909788bb-9053-4ed7-85d8-1c36ea96066c\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.517257 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-combined-ca-bundle\") pod \"909788bb-9053-4ed7-85d8-1c36ea96066c\" (UID: \"909788bb-9053-4ed7-85d8-1c36ea96066c\") " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.518835 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "909788bb-9053-4ed7-85d8-1c36ea96066c" (UID: "909788bb-9053-4ed7-85d8-1c36ea96066c"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.521607 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:42 crc kubenswrapper[4922]: E0929 22:46:42.525865 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-782mg" podUID="43650a1d-3702-40e1-b4ef-2cc2f2343c28" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.527703 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-logs" (OuterVolumeSpecName: "logs") pod "909788bb-9053-4ed7-85d8-1c36ea96066c" (UID: "909788bb-9053-4ed7-85d8-1c36ea96066c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.587897 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/909788bb-9053-4ed7-85d8-1c36ea96066c-kube-api-access-nk4h6" (OuterVolumeSpecName: "kube-api-access-nk4h6") pod "909788bb-9053-4ed7-85d8-1c36ea96066c" (UID: "909788bb-9053-4ed7-85d8-1c36ea96066c"). InnerVolumeSpecName "kube-api-access-nk4h6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.590841 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "909788bb-9053-4ed7-85d8-1c36ea96066c" (UID: "909788bb-9053-4ed7-85d8-1c36ea96066c"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.590914 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-scripts" (OuterVolumeSpecName: "scripts") pod "909788bb-9053-4ed7-85d8-1c36ea96066c" (UID: "909788bb-9053-4ed7-85d8-1c36ea96066c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.617682 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "909788bb-9053-4ed7-85d8-1c36ea96066c" (UID: "909788bb-9053-4ed7-85d8-1c36ea96066c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.619835 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"909788bb-9053-4ed7-85d8-1c36ea96066c","Type":"ContainerDied","Data":"e25888681039c2866b46d92b03872bd1121ef48fe292204590389a7009c2b3b8"} Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.619876 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-8f78686f5-pj8pr"] Sep 29 22:46:42 crc kubenswrapper[4922]: E0929 22:46:42.620217 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a17d6c-65b4-4006-8d06-9942b01955d1" containerName="barbican-db-sync" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.620229 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a17d6c-65b4-4006-8d06-9942b01955d1" containerName="barbican-db-sync" Sep 29 22:46:42 crc kubenswrapper[4922]: E0929 22:46:42.620242 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-log" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.620247 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-log" Sep 29 22:46:42 crc kubenswrapper[4922]: E0929 22:46:42.620257 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-httpd" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.620263 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-httpd" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.620436 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a17d6c-65b4-4006-8d06-9942b01955d1" containerName="barbican-db-sync" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.620446 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-log" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.620461 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" containerName="glance-httpd" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.621562 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-8f78686f5-pj8pr"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.621582 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.621602 4922 scope.go:117] "RemoveContainer" containerID="86fdc90b39b1b811f3dc738d107bbe0b5c017f5d91f898435027426b66e77afc" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.626064 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.628561 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.630321 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.630342 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.630365 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/909788bb-9053-4ed7-85d8-1c36ea96066c-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.630374 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nk4h6\" (UniqueName: \"kubernetes.io/projected/909788bb-9053-4ed7-85d8-1c36ea96066c-kube-api-access-nk4h6\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.630384 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.632302 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.633363 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-nkbxk" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.633521 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.644901 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.645053 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.660679 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-hc77m"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.663980 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-config-data" (OuterVolumeSpecName: "config-data") pod "909788bb-9053-4ed7-85d8-1c36ea96066c" (UID: "909788bb-9053-4ed7-85d8-1c36ea96066c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.665028 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.670013 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.683577 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-hc77m"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.700201 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5c6dd6984b-wltr5"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.701900 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.704143 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.707442 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c6dd6984b-wltr5"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733205 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-combined-ca-bundle\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733274 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data-custom\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733303 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733320 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4799fcf9-24e7-4c61-9e5e-109105ec7003-logs\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733342 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733358 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-combined-ca-bundle\") pod 
\"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733469 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98ffad34-9721-4849-84ba-f14c518250ac-logs\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733495 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6rgk\" (UniqueName: \"kubernetes.io/projected/4799fcf9-24e7-4c61-9e5e-109105ec7003-kube-api-access-x6rgk\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733533 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733550 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data-custom\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733575 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvflg\" (UniqueName: \"kubernetes.io/projected/c89b16a7-56fb-4ceb-8cc6-c382d983d575-kube-api-access-dvflg\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733594 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733626 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jb72\" (UniqueName: \"kubernetes.io/projected/98ffad34-9721-4849-84ba-f14c518250ac-kube-api-access-9jb72\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733642 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-config\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733665 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733698 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733744 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.733755 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/909788bb-9053-4ed7-85d8-1c36ea96066c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.838076 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98ffad34-9721-4849-84ba-f14c518250ac-logs\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.838162 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6rgk\" (UniqueName: \"kubernetes.io/projected/4799fcf9-24e7-4c61-9e5e-109105ec7003-kube-api-access-x6rgk\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.838818 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.838848 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data-custom\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.839269 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvflg\" (UniqueName: \"kubernetes.io/projected/c89b16a7-56fb-4ceb-8cc6-c382d983d575-kube-api-access-dvflg\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.839368 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data-custom\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.839448 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.839465 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-logs\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.839733 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-combined-ca-bundle\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840085 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jb72\" (UniqueName: \"kubernetes.io/projected/98ffad34-9721-4849-84ba-f14c518250ac-kube-api-access-9jb72\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840240 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-config\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840264 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840500 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840546 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2wc8\" (UniqueName: \"kubernetes.io/projected/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-kube-api-access-v2wc8\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840564 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840615 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-combined-ca-bundle\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840703 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data-custom\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840735 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840757 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4799fcf9-24e7-4c61-9e5e-109105ec7003-logs\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840777 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.840795 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-combined-ca-bundle\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.844744 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98ffad34-9721-4849-84ba-f14c518250ac-logs\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.845153 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-svc\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.846074 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-config\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.846098 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-nb\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.846132 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-sb\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.846158 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4799fcf9-24e7-4c61-9e5e-109105ec7003-logs\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.848795 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-swift-storage-0\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.854610 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.856320 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data-custom\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.856809 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data-custom\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.857239 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jb72\" (UniqueName: \"kubernetes.io/projected/98ffad34-9721-4849-84ba-f14c518250ac-kube-api-access-9jb72\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.858066 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:42 crc kubenswrapper[4922]: 
I0929 22:46:42.858251 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-combined-ca-bundle\") pod \"barbican-worker-8f78686f5-pj8pr\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.862313 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-combined-ca-bundle\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.865023 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6rgk\" (UniqueName: \"kubernetes.io/projected/4799fcf9-24e7-4c61-9e5e-109105ec7003-kube-api-access-x6rgk\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.870343 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvflg\" (UniqueName: \"kubernetes.io/projected/c89b16a7-56fb-4ceb-8cc6-c382d983d575-kube-api-access-dvflg\") pod \"dnsmasq-dns-6d66f584d7-hc77m\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.870458 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data\") pod \"barbican-keystone-listener-8bfd4f7f6-rvh6z\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.870954 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.883570 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.885036 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.886279 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.888880 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.889219 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.942872 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.942927 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.942972 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.942997 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943039 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data-custom\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943298 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-logs\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943381 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-combined-ca-bundle\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943536 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" 
(UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943620 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-logs\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943758 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-logs\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943761 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943859 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.943905 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2wc8\" (UniqueName: \"kubernetes.io/projected/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-kube-api-access-v2wc8\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.944028 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b784l\" (UniqueName: \"kubernetes.io/projected/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-kube-api-access-b784l\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.948281 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.949291 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.949465 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-combined-ca-bundle\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.951994 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data-custom\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.960050 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2wc8\" (UniqueName: \"kubernetes.io/projected/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-kube-api-access-v2wc8\") pod \"barbican-api-5c6dd6984b-wltr5\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.961807 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:46:42 crc kubenswrapper[4922]: I0929 22:46:42.996048 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.016502 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.046201 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.046266 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.046300 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.046334 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.046616 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.047263 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.046383 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.052117 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-logs\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.052308 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc 
kubenswrapper[4922]: I0929 22:46:43.052420 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b784l\" (UniqueName: \"kubernetes.io/projected/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-kube-api-access-b784l\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.052903 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-logs\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.054438 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.055782 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.056426 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.057140 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.068894 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b784l\" (UniqueName: \"kubernetes.io/projected/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-kube-api-access-b784l\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.072549 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.200963 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.812665 4922 scope.go:117] "RemoveContainer" containerID="8bcf0c93a244256c64ddedaff63fce16213a016c93e245bdde1d341d23f26a0d" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.894214 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.899591 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.967245 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-config\") pod \"c672b20c-8486-4def-9b55-6907518cb710\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.967753 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-logs\") pod \"80abe265-931c-437c-a82e-768ff29f853a\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.967799 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-nb\") pod \"c672b20c-8486-4def-9b55-6907518cb710\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.967824 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-config-data\") pod \"80abe265-931c-437c-a82e-768ff29f853a\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.967872 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pct25\" (UniqueName: \"kubernetes.io/projected/c672b20c-8486-4def-9b55-6907518cb710-kube-api-access-pct25\") pod \"c672b20c-8486-4def-9b55-6907518cb710\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.967892 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-combined-ca-bundle\") pod \"80abe265-931c-437c-a82e-768ff29f853a\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.967909 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-dns-svc\") pod \"c672b20c-8486-4def-9b55-6907518cb710\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.968027 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"80abe265-931c-437c-a82e-768ff29f853a\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.968047 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-scripts\") pod \"80abe265-931c-437c-a82e-768ff29f853a\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.968090 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5pdh\" (UniqueName: 
\"kubernetes.io/projected/80abe265-931c-437c-a82e-768ff29f853a-kube-api-access-l5pdh\") pod \"80abe265-931c-437c-a82e-768ff29f853a\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.968117 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-sb\") pod \"c672b20c-8486-4def-9b55-6907518cb710\" (UID: \"c672b20c-8486-4def-9b55-6907518cb710\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.968136 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-httpd-run\") pod \"80abe265-931c-437c-a82e-768ff29f853a\" (UID: \"80abe265-931c-437c-a82e-768ff29f853a\") " Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.969217 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "80abe265-931c-437c-a82e-768ff29f853a" (UID: "80abe265-931c-437c-a82e-768ff29f853a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.970481 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-logs" (OuterVolumeSpecName: "logs") pod "80abe265-931c-437c-a82e-768ff29f853a" (UID: "80abe265-931c-437c-a82e-768ff29f853a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.973761 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-scripts" (OuterVolumeSpecName: "scripts") pod "80abe265-931c-437c-a82e-768ff29f853a" (UID: "80abe265-931c-437c-a82e-768ff29f853a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.977831 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80abe265-931c-437c-a82e-768ff29f853a-kube-api-access-l5pdh" (OuterVolumeSpecName: "kube-api-access-l5pdh") pod "80abe265-931c-437c-a82e-768ff29f853a" (UID: "80abe265-931c-437c-a82e-768ff29f853a"). InnerVolumeSpecName "kube-api-access-l5pdh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.984023 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c672b20c-8486-4def-9b55-6907518cb710-kube-api-access-pct25" (OuterVolumeSpecName: "kube-api-access-pct25") pod "c672b20c-8486-4def-9b55-6907518cb710" (UID: "c672b20c-8486-4def-9b55-6907518cb710"). InnerVolumeSpecName "kube-api-access-pct25". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:43 crc kubenswrapper[4922]: I0929 22:46:43.985544 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "80abe265-931c-437c-a82e-768ff29f853a" (UID: "80abe265-931c-437c-a82e-768ff29f853a"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.021838 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-config-data" (OuterVolumeSpecName: "config-data") pod "80abe265-931c-437c-a82e-768ff29f853a" (UID: "80abe265-931c-437c-a82e-768ff29f853a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.024491 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80abe265-931c-437c-a82e-768ff29f853a" (UID: "80abe265-931c-437c-a82e-768ff29f853a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.027443 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c672b20c-8486-4def-9b55-6907518cb710" (UID: "c672b20c-8486-4def-9b55-6907518cb710"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.033146 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c672b20c-8486-4def-9b55-6907518cb710" (UID: "c672b20c-8486-4def-9b55-6907518cb710"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.045478 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c672b20c-8486-4def-9b55-6907518cb710" (UID: "c672b20c-8486-4def-9b55-6907518cb710"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.054511 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-config" (OuterVolumeSpecName: "config") pod "c672b20c-8486-4def-9b55-6907518cb710" (UID: "c672b20c-8486-4def-9b55-6907518cb710"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071199 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071231 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071248 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pct25\" (UniqueName: \"kubernetes.io/projected/c672b20c-8486-4def-9b55-6907518cb710-kube-api-access-pct25\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071259 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071272 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071283 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071325 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071337 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80abe265-931c-437c-a82e-768ff29f853a-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071350 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5pdh\" (UniqueName: \"kubernetes.io/projected/80abe265-931c-437c-a82e-768ff29f853a-kube-api-access-l5pdh\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071361 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071372 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/80abe265-931c-437c-a82e-768ff29f853a-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.071383 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c672b20c-8486-4def-9b55-6907518cb710-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.095821 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.173142 4922 reconciler_common.go:293] "Volume detached for 
volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.442288 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="909788bb-9053-4ed7-85d8-1c36ea96066c" path="/var/lib/kubelet/pods/909788bb-9053-4ed7-85d8-1c36ea96066c/volumes" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.530736 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" event={"ID":"c672b20c-8486-4def-9b55-6907518cb710","Type":"ContainerDied","Data":"1c77d4828900bef83d607978bda7e94dc7949bc51f42fba1760a536c2d436d7d"} Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.530797 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-llwhb" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.535808 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.535800 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"80abe265-931c-437c-a82e-768ff29f853a","Type":"ContainerDied","Data":"cfcd2d6968e223ab931ff80468a8feccd2ec594292ee30ea5472c384d011b2b8"} Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.556765 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-llwhb"] Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.572770 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-llwhb"] Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.585698 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.598228 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.605613 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:44 crc kubenswrapper[4922]: E0929 22:46:44.606014 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-log" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.606032 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-log" Sep 29 22:46:44 crc kubenswrapper[4922]: E0929 22:46:44.606042 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-httpd" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.606049 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-httpd" Sep 29 22:46:44 crc kubenswrapper[4922]: E0929 22:46:44.606078 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c672b20c-8486-4def-9b55-6907518cb710" containerName="dnsmasq-dns" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.606084 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c672b20c-8486-4def-9b55-6907518cb710" containerName="dnsmasq-dns" Sep 29 22:46:44 crc kubenswrapper[4922]: E0929 22:46:44.606098 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c672b20c-8486-4def-9b55-6907518cb710" 
containerName="init" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.606104 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c672b20c-8486-4def-9b55-6907518cb710" containerName="init" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.606265 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-log" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.606285 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c672b20c-8486-4def-9b55-6907518cb710" containerName="dnsmasq-dns" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.606299 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="80abe265-931c-437c-a82e-768ff29f853a" containerName="glance-httpd" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.607233 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.612847 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.614076 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.614341 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.680750 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-config-data\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.680823 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gh8j\" (UniqueName: \"kubernetes.io/projected/6316309d-31b2-4062-a285-322d33221ee6-kube-api-access-2gh8j\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.680861 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.680894 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-scripts\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.680917 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 
crc kubenswrapper[4922]: I0929 22:46:44.680940 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.681002 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.681055 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-logs\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.781981 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-config-data\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782038 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gh8j\" (UniqueName: \"kubernetes.io/projected/6316309d-31b2-4062-a285-322d33221ee6-kube-api-access-2gh8j\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782062 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782086 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-scripts\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782103 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782121 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782166 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782207 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-logs\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782654 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782841 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.782911 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-logs\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.787484 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-config-data\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.788431 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.792793 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-scripts\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.799194 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.802226 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gh8j\" (UniqueName: 
\"kubernetes.io/projected/6316309d-31b2-4062-a285-322d33221ee6-kube-api-access-2gh8j\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.813117 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " pod="openstack/glance-default-external-api-0" Sep 29 22:46:44 crc kubenswrapper[4922]: I0929 22:46:44.937934 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.119793 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-75fb76f858-mmqwn"] Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.125483 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.127173 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.127331 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.133143 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75fb76f858-mmqwn"] Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.188884 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf7d4\" (UniqueName: \"kubernetes.io/projected/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-kube-api-access-lf7d4\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.188920 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-combined-ca-bundle\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.188945 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.189020 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-public-tls-certs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.189134 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data-custom\") pod 
\"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.189166 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-logs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.189186 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-internal-tls-certs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.296913 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-internal-tls-certs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.297196 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf7d4\" (UniqueName: \"kubernetes.io/projected/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-kube-api-access-lf7d4\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.297288 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-combined-ca-bundle\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.297365 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.297534 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-public-tls-certs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.297667 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data-custom\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.297773 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-logs\") pod 
\"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.298110 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-logs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.301558 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-public-tls-certs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.301625 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data-custom\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.302316 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-combined-ca-bundle\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.308779 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-internal-tls-certs\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: E0929 22:46:45.319185 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43fe2c56_fd4b_4e01_9550_49d15df8264f.slice/crio-b137252b3fcf80b102a7512521912ab1e7489cb1db512f4c41d4510733941949.scope\": RecentStats: unable to find data in memory cache]" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.328475 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.331930 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf7d4\" (UniqueName: \"kubernetes.io/projected/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-kube-api-access-lf7d4\") pod \"barbican-api-75fb76f858-mmqwn\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.447265 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.551797 4922 generic.go:334] "Generic (PLEG): container finished" podID="43fe2c56-fd4b-4e01-9550-49d15df8264f" containerID="b137252b3fcf80b102a7512521912ab1e7489cb1db512f4c41d4510733941949" exitCode=0 Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.551840 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-b7zw5" event={"ID":"43fe2c56-fd4b-4e01-9550-49d15df8264f","Type":"ContainerDied","Data":"b137252b3fcf80b102a7512521912ab1e7489cb1db512f4c41d4510733941949"} Sep 29 22:46:45 crc kubenswrapper[4922]: E0929 22:46:45.795446 4922 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/sg-core:latest" Sep 29 22:46:45 crc kubenswrapper[4922]: E0929 22:46:45.795897 4922 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/openstack-k8s-operators/sg-core:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:sg-core-conf-yaml,ReadOnly:false,MountPath:/etc/sg-core.conf.yaml,SubPath:sg-core.conf.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v2jzl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(9eda432e-f3e4-4a47-ab28-7175f2e28034): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 22:46:45 crc kubenswrapper[4922]: I0929 22:46:45.957540 4922 scope.go:117] "RemoveContainer" containerID="fd728657c94eab24147d3c2acb47f39b080b44c782f519f650b2d82010f8c22d" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.118250 4922 scope.go:117] "RemoveContainer" containerID="d89cb411a6e68c912f20d4960314e51355cf8f2cea34465d64222d459952e11e" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.190550 4922 scope.go:117] "RemoveContainer" containerID="df4dd4771a5699533dfaac86cfd7ab9d4cc4a35d468c739942bd0d86a26634a9" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.288092 4922 scope.go:117] "RemoveContainer" containerID="7e39e24258fe808639c97435d4ac9f91b722f961d68853b5ee3288a90da7acf3" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.434269 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80abe265-931c-437c-a82e-768ff29f853a" path="/var/lib/kubelet/pods/80abe265-931c-437c-a82e-768ff29f853a/volumes" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.435181 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c672b20c-8486-4def-9b55-6907518cb710" path="/var/lib/kubelet/pods/c672b20c-8486-4def-9b55-6907518cb710/volumes" Sep 29 22:46:46 crc kubenswrapper[4922]: W0929 22:46:46.539884 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5f7e18e_208b_434c_a4ce_d2ab0ce15146.slice/crio-7f48016d3e869419105ed9762b60d07977a41c9829e22a2694b146e9632669f0 WatchSource:0}: Error finding container 7f48016d3e869419105ed9762b60d07977a41c9829e22a2694b146e9632669f0: Status 404 returned error can't find the container with id 7f48016d3e869419105ed9762b60d07977a41c9829e22a2694b146e9632669f0 Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.540658 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c6dd6984b-wltr5"] Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.617546 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c6dd6984b-wltr5" event={"ID":"e5f7e18e-208b-434c-a4ce-d2ab0ce15146","Type":"ContainerStarted","Data":"7f48016d3e869419105ed9762b60d07977a41c9829e22a2694b146e9632669f0"} Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.632920 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-556f68d56-jxmlq" event={"ID":"51f81c86-8f6d-4506-a940-5015032df5bd","Type":"ContainerStarted","Data":"114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4"} Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.633243 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.633494 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.637319 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.656715 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-556f68d56-jxmlq" podStartSLOduration=15.656696765 podStartE2EDuration="15.656696765s" podCreationTimestamp="2025-09-29 22:46:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:46.655543336 +0000 UTC m=+1210.965832149" watchObservedRunningTime="2025-09-29 22:46:46.656696765 +0000 UTC m=+1210.966985568" Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.783714 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z"] Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.797217 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-75fb76f858-mmqwn"] Sep 29 22:46:46 crc kubenswrapper[4922]: W0929 22:46:46.827742 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e448a92_1d3a_4bf6_a7f4_dbedb7b829d3.slice/crio-b58903704e3604a9d7a8b3492852fd57de1554a4810248296bcf01ec3baf7782 WatchSource:0}: Error finding container b58903704e3604a9d7a8b3492852fd57de1554a4810248296bcf01ec3baf7782: Status 404 returned error can't find the container with id b58903704e3604a9d7a8b3492852fd57de1554a4810248296bcf01ec3baf7782 Sep 29 22:46:46 crc kubenswrapper[4922]: W0929 22:46:46.838253 4922 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4799fcf9_24e7_4c61_9e5e_109105ec7003.slice/crio-c638d48f64d7d197b2acb87180e7f45bbb0455007dd477645ef338648572d8c7 WatchSource:0}: Error finding container c638d48f64d7d197b2acb87180e7f45bbb0455007dd477645ef338648572d8c7: Status 404 returned error can't find the container with id c638d48f64d7d197b2acb87180e7f45bbb0455007dd477645ef338648572d8c7 Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.910615 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:46:46 crc kubenswrapper[4922]: W0929 22:46:46.938183 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6316309d_31b2_4062_a285_322d33221ee6.slice/crio-bf94a062e5a18ae5eee10d2c84095501e780d421c209ff71768e682119a5304a WatchSource:0}: Error finding container bf94a062e5a18ae5eee10d2c84095501e780d421c209ff71768e682119a5304a: Status 404 returned error can't find the container with id bf94a062e5a18ae5eee10d2c84095501e780d421c209ff71768e682119a5304a Sep 29 22:46:46 crc kubenswrapper[4922]: I0929 22:46:46.999469 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-hc77m"] Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.006200 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-8f78686f5-pj8pr"] Sep 29 22:46:47 crc kubenswrapper[4922]: W0929 22:46:47.010282 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc89b16a7_56fb_4ceb_8cc6_c382d983d575.slice/crio-25d3d4015b3e2a983cd67107f188998056fc0f7bf81937a49604fcfa6ea417e8 WatchSource:0}: Error finding container 25d3d4015b3e2a983cd67107f188998056fc0f7bf81937a49604fcfa6ea417e8: Status 404 returned error can't find the container with id 25d3d4015b3e2a983cd67107f188998056fc0f7bf81937a49604fcfa6ea417e8 Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.052826 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.248705 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-config\") pod \"43fe2c56-fd4b-4e01-9550-49d15df8264f\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.248756 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkq4c\" (UniqueName: \"kubernetes.io/projected/43fe2c56-fd4b-4e01-9550-49d15df8264f-kube-api-access-zkq4c\") pod \"43fe2c56-fd4b-4e01-9550-49d15df8264f\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.248776 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-combined-ca-bundle\") pod \"43fe2c56-fd4b-4e01-9550-49d15df8264f\" (UID: \"43fe2c56-fd4b-4e01-9550-49d15df8264f\") " Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.256239 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43fe2c56-fd4b-4e01-9550-49d15df8264f-kube-api-access-zkq4c" (OuterVolumeSpecName: "kube-api-access-zkq4c") pod "43fe2c56-fd4b-4e01-9550-49d15df8264f" (UID: "43fe2c56-fd4b-4e01-9550-49d15df8264f"). InnerVolumeSpecName "kube-api-access-zkq4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.296225 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-config" (OuterVolumeSpecName: "config") pod "43fe2c56-fd4b-4e01-9550-49d15df8264f" (UID: "43fe2c56-fd4b-4e01-9550-49d15df8264f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.296697 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43fe2c56-fd4b-4e01-9550-49d15df8264f" (UID: "43fe2c56-fd4b-4e01-9550-49d15df8264f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.350601 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.350635 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkq4c\" (UniqueName: \"kubernetes.io/projected/43fe2c56-fd4b-4e01-9550-49d15df8264f-kube-api-access-zkq4c\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.350800 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43fe2c56-fd4b-4e01-9550-49d15df8264f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.756654 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" event={"ID":"4799fcf9-24e7-4c61-9e5e-109105ec7003","Type":"ContainerStarted","Data":"c638d48f64d7d197b2acb87180e7f45bbb0455007dd477645ef338648572d8c7"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.760092 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-hc77m"] Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.775183 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6316309d-31b2-4062-a285-322d33221ee6","Type":"ContainerStarted","Data":"bf94a062e5a18ae5eee10d2c84095501e780d421c209ff71768e682119a5304a"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.784934 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c6dd6984b-wltr5" event={"ID":"e5f7e18e-208b-434c-a4ce-d2ab0ce15146","Type":"ContainerStarted","Data":"806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.784982 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c6dd6984b-wltr5" event={"ID":"e5f7e18e-208b-434c-a4ce-d2ab0ce15146","Type":"ContainerStarted","Data":"700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.785546 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.785610 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.788572 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"898e76fa-e8bb-4354-802f-5a6f6c14c0b0","Type":"ContainerStarted","Data":"c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.788620 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"898e76fa-e8bb-4354-802f-5a6f6c14c0b0","Type":"ContainerStarted","Data":"ece89910a5e36e023fac8782901a68413fd0e751a7731d36c458d1cd9da22d07"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.808936 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8f78686f5-pj8pr" 
event={"ID":"98ffad34-9721-4849-84ba-f14c518250ac","Type":"ContainerStarted","Data":"85360d25da017ae5ee690d5a3af4906df942e2cdead12892023e658ff64d17a6"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.813983 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-f5h89"] Sep 29 22:46:47 crc kubenswrapper[4922]: E0929 22:46:47.814958 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43fe2c56-fd4b-4e01-9550-49d15df8264f" containerName="neutron-db-sync" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.814983 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="43fe2c56-fd4b-4e01-9550-49d15df8264f" containerName="neutron-db-sync" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.815378 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="43fe2c56-fd4b-4e01-9550-49d15df8264f" containerName="neutron-db-sync" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.816816 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.835447 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-f5h89"] Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.836208 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5c6dd6984b-wltr5" podStartSLOduration=5.8361938129999995 podStartE2EDuration="5.836193813s" podCreationTimestamp="2025-09-29 22:46:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:47.817565097 +0000 UTC m=+1212.127853910" watchObservedRunningTime="2025-09-29 22:46:47.836193813 +0000 UTC m=+1212.146482616" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.868252 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75fb76f858-mmqwn" event={"ID":"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3","Type":"ContainerStarted","Data":"6900ea52bd3a44fd3677b19a3a356664f86ce5fb715b38eadb63dcaaa0a2a2c5"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.868328 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75fb76f858-mmqwn" event={"ID":"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3","Type":"ContainerStarted","Data":"172f258b5cd1d2a007268349f6866d6235b2344ef34219c5f1c44b3260c97af7"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.868340 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75fb76f858-mmqwn" event={"ID":"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3","Type":"ContainerStarted","Data":"b58903704e3604a9d7a8b3492852fd57de1554a4810248296bcf01ec3baf7782"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.871023 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.881581 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-54dbcc9b8d-c5whm"] Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.882124 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-b7zw5" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.884417 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-b7zw5" event={"ID":"43fe2c56-fd4b-4e01-9550-49d15df8264f","Type":"ContainerDied","Data":"1226b62c31d57bac603b763cf7f487331a9041277c99f973c0e6f7a2dfb0edb0"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.884945 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1226b62c31d57bac603b763cf7f487331a9041277c99f973c0e6f7a2dfb0edb0" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.884888 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.886881 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.887052 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.887157 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-wxrp8" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.887174 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.891858 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54dbcc9b8d-c5whm"] Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.894015 4922 generic.go:334] "Generic (PLEG): container finished" podID="c89b16a7-56fb-4ceb-8cc6-c382d983d575" containerID="a76a5c321b16120fb70ad542c5fe974b798342d00cecbf7d49910d6959d51f3b" exitCode=0 Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.895139 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" event={"ID":"c89b16a7-56fb-4ceb-8cc6-c382d983d575","Type":"ContainerDied","Data":"a76a5c321b16120fb70ad542c5fe974b798342d00cecbf7d49910d6959d51f3b"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.895160 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" event={"ID":"c89b16a7-56fb-4ceb-8cc6-c382d983d575","Type":"ContainerStarted","Data":"25d3d4015b3e2a983cd67107f188998056fc0f7bf81937a49604fcfa6ea417e8"} Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.920163 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-75fb76f858-mmqwn" podStartSLOduration=2.9201450380000002 podStartE2EDuration="2.920145038s" podCreationTimestamp="2025-09-29 22:46:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:47.906480284 +0000 UTC m=+1212.216769087" watchObservedRunningTime="2025-09-29 22:46:47.920145038 +0000 UTC m=+1212.230433851" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.973614 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.973663 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-svc\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.973746 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.973761 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.973878 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-config\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:47 crc kubenswrapper[4922]: I0929 22:46:47.973911 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmzsc\" (UniqueName: \"kubernetes.io/projected/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-kube-api-access-zmzsc\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075203 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-svc\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075551 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mvpp\" (UniqueName: \"kubernetes.io/projected/c2331dae-0582-4728-8c1a-304d087ccf91-kube-api-access-7mvpp\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075585 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-combined-ca-bundle\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075628 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 
crc kubenswrapper[4922]: I0929 22:46:48.075646 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075670 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-config\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075742 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-ovndb-tls-certs\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075827 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-httpd-config\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.075995 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-config\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.076096 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmzsc\" (UniqueName: \"kubernetes.io/projected/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-kube-api-access-zmzsc\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.076109 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-svc\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.076356 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.077640 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.077879 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.078815 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.079691 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-config\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.097063 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmzsc\" (UniqueName: \"kubernetes.io/projected/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-kube-api-access-zmzsc\") pod \"dnsmasq-dns-688c87cc99-f5h89\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.177544 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mvpp\" (UniqueName: \"kubernetes.io/projected/c2331dae-0582-4728-8c1a-304d087ccf91-kube-api-access-7mvpp\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.177602 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-combined-ca-bundle\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.177638 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-config\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.177680 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-ovndb-tls-certs\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.177706 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-httpd-config\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.181567 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-httpd-config\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.184596 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-combined-ca-bundle\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.205202 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-ovndb-tls-certs\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.208114 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.210734 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mvpp\" (UniqueName: \"kubernetes.io/projected/c2331dae-0582-4728-8c1a-304d087ccf91-kube-api-access-7mvpp\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.218183 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-config\") pod \"neutron-54dbcc9b8d-c5whm\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.240258 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:48 crc kubenswrapper[4922]: E0929 22:46:48.273147 4922 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Sep 29 22:46:48 crc kubenswrapper[4922]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/c89b16a7-56fb-4ceb-8cc6-c382d983d575/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Sep 29 22:46:48 crc kubenswrapper[4922]: > podSandboxID="25d3d4015b3e2a983cd67107f188998056fc0f7bf81937a49604fcfa6ea417e8" Sep 29 22:46:48 crc kubenswrapper[4922]: E0929 22:46:48.273294 4922 kuberuntime_manager.go:1274] "Unhandled Error" err=< Sep 29 22:46:48 crc kubenswrapper[4922]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n554h646h555h654hd6h55fh5ch576h55dh5b5h697h6dh55bh699h56h569hd6hb5h549h675h66h57dh64bhbh96h7fh76hc8h547h7bh699h5cbq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dvflg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-6d66f584d7-hc77m_openstack(c89b16a7-56fb-4ceb-8cc6-c382d983d575): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/c89b16a7-56fb-4ceb-8cc6-c382d983d575/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Sep 29 22:46:48 crc kubenswrapper[4922]: > logger="UnhandledError" Sep 29 22:46:48 crc kubenswrapper[4922]: E0929 22:46:48.274551 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/c89b16a7-56fb-4ceb-8cc6-c382d983d575/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" podUID="c89b16a7-56fb-4ceb-8cc6-c382d983d575" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.599779 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.907732 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6316309d-31b2-4062-a285-322d33221ee6","Type":"ContainerStarted","Data":"1c96e92ffa0c3272cb8760ec8cc7d7c542955b5586ada5d9e51c0bc13a600399"} Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.918360 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"898e76fa-e8bb-4354-802f-5a6f6c14c0b0","Type":"ContainerStarted","Data":"d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70"} Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.919745 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.940072 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.940034908 podStartE2EDuration="6.940034908s" podCreationTimestamp="2025-09-29 22:46:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:48.939497655 +0000 UTC m=+1213.249786468" watchObservedRunningTime="2025-09-29 22:46:48.940034908 +0000 UTC m=+1213.250323721" Sep 29 22:46:48 crc kubenswrapper[4922]: I0929 22:46:48.965977 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54dbcc9b8d-c5whm"] Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.086866 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-f5h89"] Sep 29 
22:46:49 crc kubenswrapper[4922]: W0929 22:46:49.114874 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod934e0cb8_cc09_4c8d_9e74_938918b3fb3d.slice/crio-4ce3a7870161e86fba13809190df57f504e28fd14a712f769188647bcb983c4e WatchSource:0}: Error finding container 4ce3a7870161e86fba13809190df57f504e28fd14a712f769188647bcb983c4e: Status 404 returned error can't find the container with id 4ce3a7870161e86fba13809190df57f504e28fd14a712f769188647bcb983c4e Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.254020 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.414859 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-sb\") pod \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.415161 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-swift-storage-0\") pod \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.415190 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvflg\" (UniqueName: \"kubernetes.io/projected/c89b16a7-56fb-4ceb-8cc6-c382d983d575-kube-api-access-dvflg\") pod \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.415228 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-nb\") pod \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.415321 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-config\") pod \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.415342 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-svc\") pod \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\" (UID: \"c89b16a7-56fb-4ceb-8cc6-c382d983d575\") " Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.421626 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c89b16a7-56fb-4ceb-8cc6-c382d983d575-kube-api-access-dvflg" (OuterVolumeSpecName: "kube-api-access-dvflg") pod "c89b16a7-56fb-4ceb-8cc6-c382d983d575" (UID: "c89b16a7-56fb-4ceb-8cc6-c382d983d575"). InnerVolumeSpecName "kube-api-access-dvflg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.517855 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvflg\" (UniqueName: \"kubernetes.io/projected/c89b16a7-56fb-4ceb-8cc6-c382d983d575-kube-api-access-dvflg\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.531312 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c89b16a7-56fb-4ceb-8cc6-c382d983d575" (UID: "c89b16a7-56fb-4ceb-8cc6-c382d983d575"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.532846 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c89b16a7-56fb-4ceb-8cc6-c382d983d575" (UID: "c89b16a7-56fb-4ceb-8cc6-c382d983d575"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.538061 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c89b16a7-56fb-4ceb-8cc6-c382d983d575" (UID: "c89b16a7-56fb-4ceb-8cc6-c382d983d575"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.558688 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c89b16a7-56fb-4ceb-8cc6-c382d983d575" (UID: "c89b16a7-56fb-4ceb-8cc6-c382d983d575"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.562969 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-config" (OuterVolumeSpecName: "config") pod "c89b16a7-56fb-4ceb-8cc6-c382d983d575" (UID: "c89b16a7-56fb-4ceb-8cc6-c382d983d575"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.619849 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.619882 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.619892 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.619902 4922 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.619911 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c89b16a7-56fb-4ceb-8cc6-c382d983d575-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.936735 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" event={"ID":"934e0cb8-cc09-4c8d-9e74-938918b3fb3d","Type":"ContainerStarted","Data":"4ce3a7870161e86fba13809190df57f504e28fd14a712f769188647bcb983c4e"} Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.938392 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" event={"ID":"c89b16a7-56fb-4ceb-8cc6-c382d983d575","Type":"ContainerDied","Data":"25d3d4015b3e2a983cd67107f188998056fc0f7bf81937a49604fcfa6ea417e8"} Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.938435 4922 scope.go:117] "RemoveContainer" containerID="a76a5c321b16120fb70ad542c5fe974b798342d00cecbf7d49910d6959d51f3b" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.938634 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d66f584d7-hc77m" Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.943376 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54dbcc9b8d-c5whm" event={"ID":"c2331dae-0582-4728-8c1a-304d087ccf91","Type":"ContainerStarted","Data":"e48f6945cd05bf4bc4d065539efed61ac3bf62b1b97169255de847a552fdd5c6"} Sep 29 22:46:49 crc kubenswrapper[4922]: I0929 22:46:49.946602 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6316309d-31b2-4062-a285-322d33221ee6","Type":"ContainerStarted","Data":"06a3b9d4c3f527f22faa0d11a5b2108756dd731da4fd6b78f31da90cb6e66c13"} Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.015661 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-hc77m"] Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.056127 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d66f584d7-hc77m"] Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.423222 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-656896d5d5-fczbx"] Sep 29 22:46:50 crc kubenswrapper[4922]: E0929 22:46:50.423719 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c89b16a7-56fb-4ceb-8cc6-c382d983d575" containerName="init" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.423735 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c89b16a7-56fb-4ceb-8cc6-c382d983d575" containerName="init" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.423991 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c89b16a7-56fb-4ceb-8cc6-c382d983d575" containerName="init" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.425152 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.429548 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.429591 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.439141 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c89b16a7-56fb-4ceb-8cc6-c382d983d575" path="/var/lib/kubelet/pods/c89b16a7-56fb-4ceb-8cc6-c382d983d575/volumes" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.439886 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-656896d5d5-fczbx"] Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.542565 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkfjq\" (UniqueName: \"kubernetes.io/projected/cb84f99c-6d00-4023-9520-372992f3646e-kube-api-access-qkfjq\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.542653 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-internal-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.542677 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-public-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.542696 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-combined-ca-bundle\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.542711 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-httpd-config\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.542897 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-config\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.543050 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-ovndb-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: 
\"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.644547 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-internal-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.644594 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-public-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.644615 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-combined-ca-bundle\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.644631 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-httpd-config\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.644692 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-config\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.644742 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-ovndb-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.644771 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkfjq\" (UniqueName: \"kubernetes.io/projected/cb84f99c-6d00-4023-9520-372992f3646e-kube-api-access-qkfjq\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.649706 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-public-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.660245 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-ovndb-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 
22:46:50.660335 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-httpd-config\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.660767 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-config\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.660828 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-combined-ca-bundle\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.663806 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-internal-tls-certs\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.665734 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkfjq\" (UniqueName: \"kubernetes.io/projected/cb84f99c-6d00-4023-9520-372992f3646e-kube-api-access-qkfjq\") pod \"neutron-656896d5d5-fczbx\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.760174 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.972081 4922 generic.go:334] "Generic (PLEG): container finished" podID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerID="f5d3801b7e8cbea28efd023c00a3eb05736c1c84ab301e46dded8aeb46c30e92" exitCode=0 Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.972142 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" event={"ID":"934e0cb8-cc09-4c8d-9e74-938918b3fb3d","Type":"ContainerDied","Data":"f5d3801b7e8cbea28efd023c00a3eb05736c1c84ab301e46dded8aeb46c30e92"} Sep 29 22:46:50 crc kubenswrapper[4922]: I0929 22:46:50.978717 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54dbcc9b8d-c5whm" event={"ID":"c2331dae-0582-4728-8c1a-304d087ccf91","Type":"ContainerStarted","Data":"8ba9ff1c3b417ba93c5589b96fa732dc7aeb23ff636b44c44c80eb7b3db4a482"} Sep 29 22:46:51 crc kubenswrapper[4922]: I0929 22:46:51.046365 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.046346318 podStartE2EDuration="7.046346318s" podCreationTimestamp="2025-09-29 22:46:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:51.038480715 +0000 UTC m=+1215.348769538" watchObservedRunningTime="2025-09-29 22:46:51.046346318 +0000 UTC m=+1215.356635131" Sep 29 22:46:53 crc kubenswrapper[4922]: I0929 22:46:53.202773 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:53 crc kubenswrapper[4922]: I0929 22:46:53.203031 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:53 crc kubenswrapper[4922]: I0929 22:46:53.243608 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:53 crc kubenswrapper[4922]: I0929 22:46:53.250188 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:53 crc kubenswrapper[4922]: I0929 22:46:53.476696 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5c6dd6984b-wltr5" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.011220 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.011265 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.293059 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.298122 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.939392 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.939449 4922 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.980517 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 22:46:54 crc kubenswrapper[4922]: I0929 22:46:54.986218 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 22:46:55 crc kubenswrapper[4922]: I0929 22:46:55.023307 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 22:46:55 crc kubenswrapper[4922]: I0929 22:46:55.023345 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 22:46:56 crc kubenswrapper[4922]: I0929 22:46:56.087763 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:56 crc kubenswrapper[4922]: I0929 22:46:56.088364 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:46:56 crc kubenswrapper[4922]: I0929 22:46:56.102532 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.002489 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.085198 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.180525 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c6dd6984b-wltr5"] Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.180783 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c6dd6984b-wltr5" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api-log" containerID="cri-o://700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6" gracePeriod=30 Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.180848 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c6dd6984b-wltr5" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api" containerID="cri-o://806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6" gracePeriod=30 Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.452195 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.452347 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:46:57 crc kubenswrapper[4922]: I0929 22:46:57.538014 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 22:46:58 crc kubenswrapper[4922]: I0929 22:46:58.056215 4922 generic.go:334] "Generic (PLEG): container finished" podID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerID="700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6" exitCode=143 Sep 29 22:46:58 crc kubenswrapper[4922]: I0929 22:46:58.056435 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c6dd6984b-wltr5" 
event={"ID":"e5f7e18e-208b-434c-a4ce-d2ab0ce15146","Type":"ContainerDied","Data":"700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6"} Sep 29 22:46:58 crc kubenswrapper[4922]: E0929 22:46:58.259965 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" Sep 29 22:46:58 crc kubenswrapper[4922]: I0929 22:46:58.280020 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-656896d5d5-fczbx"] Sep 29 22:46:58 crc kubenswrapper[4922]: I0929 22:46:58.912350 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:46:58 crc kubenswrapper[4922]: I0929 22:46:58.912637 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.066114 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" event={"ID":"934e0cb8-cc09-4c8d-9e74-938918b3fb3d","Type":"ContainerStarted","Data":"a98166b4f4d9f2098a6ade5bbf2f9d1adec475ca2386cdd6e37391dc6c2c2b26"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.067226 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.068792 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-656896d5d5-fczbx" event={"ID":"cb84f99c-6d00-4023-9520-372992f3646e","Type":"ContainerStarted","Data":"0cb082da33df2e4d81994a52b8d0e177856277b29b39509b8cec8831f4d69eb3"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.068820 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-656896d5d5-fczbx" event={"ID":"cb84f99c-6d00-4023-9520-372992f3646e","Type":"ContainerStarted","Data":"ee6bfbd535ddb2568b60ca0420863ffa93c242860084bfd9305faf5ae6f7c154"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.068835 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-656896d5d5-fczbx" event={"ID":"cb84f99c-6d00-4023-9520-372992f3646e","Type":"ContainerStarted","Data":"a097d1c2ebd139ad62d98f468d014014923cb761c3954ba9d5533a33b1219c43"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.069414 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.071360 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54dbcc9b8d-c5whm" event={"ID":"c2331dae-0582-4728-8c1a-304d087ccf91","Type":"ContainerStarted","Data":"96e9425ddd60a2bf1a86df0f8f8a3afe27afdd3a1fc0d9e04d0422fb498c8e47"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.071857 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.073573 4922 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" event={"ID":"4799fcf9-24e7-4c61-9e5e-109105ec7003","Type":"ContainerStarted","Data":"3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.073609 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" event={"ID":"4799fcf9-24e7-4c61-9e5e-109105ec7003","Type":"ContainerStarted","Data":"a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.075725 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-782mg" event={"ID":"43650a1d-3702-40e1-b4ef-2cc2f2343c28","Type":"ContainerStarted","Data":"8003474609ae65034e3d0969aa7d08f7334d96674fed267f7823a071e990171f"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.078438 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerStarted","Data":"a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.078576 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-central-agent" containerID="cri-o://6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc" gracePeriod=30 Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.078854 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.078910 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="proxy-httpd" containerID="cri-o://a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2" gracePeriod=30 Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.078971 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-notification-agent" containerID="cri-o://b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8" gracePeriod=30 Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.092754 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8f78686f5-pj8pr" event={"ID":"98ffad34-9721-4849-84ba-f14c518250ac","Type":"ContainerStarted","Data":"71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.092797 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8f78686f5-pj8pr" event={"ID":"98ffad34-9721-4849-84ba-f14c518250ac","Type":"ContainerStarted","Data":"8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9"} Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.098782 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" podStartSLOduration=12.098761986 podStartE2EDuration="12.098761986s" podCreationTimestamp="2025-09-29 22:46:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:59.093113378 +0000 UTC m=+1223.403402181" 
watchObservedRunningTime="2025-09-29 22:46:59.098761986 +0000 UTC m=+1223.409050799" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.112866 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-8f78686f5-pj8pr" podStartSLOduration=6.401958153 podStartE2EDuration="17.112834691s" podCreationTimestamp="2025-09-29 22:46:42 +0000 UTC" firstStartedPulling="2025-09-29 22:46:47.026935759 +0000 UTC m=+1211.337224572" lastFinishedPulling="2025-09-29 22:46:57.737812297 +0000 UTC m=+1222.048101110" observedRunningTime="2025-09-29 22:46:59.111656372 +0000 UTC m=+1223.421945185" watchObservedRunningTime="2025-09-29 22:46:59.112834691 +0000 UTC m=+1223.423123504" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.151196 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-656896d5d5-fczbx" podStartSLOduration=9.15117655 podStartE2EDuration="9.15117655s" podCreationTimestamp="2025-09-29 22:46:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:59.134655365 +0000 UTC m=+1223.444944178" watchObservedRunningTime="2025-09-29 22:46:59.15117655 +0000 UTC m=+1223.461465353" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.153714 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-782mg" podStartSLOduration=6.446372326 podStartE2EDuration="40.153708062s" podCreationTimestamp="2025-09-29 22:46:19 +0000 UTC" firstStartedPulling="2025-09-29 22:46:24.200235497 +0000 UTC m=+1188.510524320" lastFinishedPulling="2025-09-29 22:46:57.907571243 +0000 UTC m=+1222.217860056" observedRunningTime="2025-09-29 22:46:59.150675597 +0000 UTC m=+1223.460964410" watchObservedRunningTime="2025-09-29 22:46:59.153708062 +0000 UTC m=+1223.463996875" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.204727 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" podStartSLOduration=6.304361044 podStartE2EDuration="17.20471095s" podCreationTimestamp="2025-09-29 22:46:42 +0000 UTC" firstStartedPulling="2025-09-29 22:46:46.843891708 +0000 UTC m=+1211.154180531" lastFinishedPulling="2025-09-29 22:46:57.744241624 +0000 UTC m=+1222.054530437" observedRunningTime="2025-09-29 22:46:59.19284045 +0000 UTC m=+1223.503129273" watchObservedRunningTime="2025-09-29 22:46:59.20471095 +0000 UTC m=+1223.514999763" Sep 29 22:46:59 crc kubenswrapper[4922]: I0929 22:46:59.208304 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-54dbcc9b8d-c5whm" podStartSLOduration=12.208296928 podStartE2EDuration="12.208296928s" podCreationTimestamp="2025-09-29 22:46:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:46:59.168119624 +0000 UTC m=+1223.478408447" watchObservedRunningTime="2025-09-29 22:46:59.208296928 +0000 UTC m=+1223.518585741" Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.102593 4922 generic.go:334] "Generic (PLEG): container finished" podID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerID="a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2" exitCode=0 Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.102895 4922 generic.go:334] "Generic (PLEG): container finished" podID="9eda432e-f3e4-4a47-ab28-7175f2e28034" 
containerID="6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc" exitCode=0 Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.102698 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerDied","Data":"a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2"} Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.102952 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerDied","Data":"6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc"} Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.357815 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c6dd6984b-wltr5" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.151:9311/healthcheck\": read tcp 10.217.0.2:56354->10.217.0.151:9311: read: connection reset by peer" Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.357858 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5c6dd6984b-wltr5" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.151:9311/healthcheck\": read tcp 10.217.0.2:56352->10.217.0.151:9311: read: connection reset by peer" Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.808874 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.954250 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-logs\") pod \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.954332 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-combined-ca-bundle\") pod \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.954423 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2wc8\" (UniqueName: \"kubernetes.io/projected/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-kube-api-access-v2wc8\") pod \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.954537 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data\") pod \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.954604 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data-custom\") pod \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\" (UID: \"e5f7e18e-208b-434c-a4ce-d2ab0ce15146\") " Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.954772 4922 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-logs" (OuterVolumeSpecName: "logs") pod "e5f7e18e-208b-434c-a4ce-d2ab0ce15146" (UID: "e5f7e18e-208b-434c-a4ce-d2ab0ce15146"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.955317 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.962521 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e5f7e18e-208b-434c-a4ce-d2ab0ce15146" (UID: "e5f7e18e-208b-434c-a4ce-d2ab0ce15146"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:00 crc kubenswrapper[4922]: I0929 22:47:00.962540 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-kube-api-access-v2wc8" (OuterVolumeSpecName: "kube-api-access-v2wc8") pod "e5f7e18e-208b-434c-a4ce-d2ab0ce15146" (UID: "e5f7e18e-208b-434c-a4ce-d2ab0ce15146"). InnerVolumeSpecName "kube-api-access-v2wc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.006098 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5f7e18e-208b-434c-a4ce-d2ab0ce15146" (UID: "e5f7e18e-208b-434c-a4ce-d2ab0ce15146"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.016867 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data" (OuterVolumeSpecName: "config-data") pod "e5f7e18e-208b-434c-a4ce-d2ab0ce15146" (UID: "e5f7e18e-208b-434c-a4ce-d2ab0ce15146"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.057564 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.057961 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2wc8\" (UniqueName: \"kubernetes.io/projected/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-kube-api-access-v2wc8\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.057980 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.057993 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e5f7e18e-208b-434c-a4ce-d2ab0ce15146-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.116404 4922 generic.go:334] "Generic (PLEG): container finished" podID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerID="806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6" exitCode=0 Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.116534 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c6dd6984b-wltr5" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.116553 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c6dd6984b-wltr5" event={"ID":"e5f7e18e-208b-434c-a4ce-d2ab0ce15146","Type":"ContainerDied","Data":"806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6"} Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.116624 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c6dd6984b-wltr5" event={"ID":"e5f7e18e-208b-434c-a4ce-d2ab0ce15146","Type":"ContainerDied","Data":"7f48016d3e869419105ed9762b60d07977a41c9829e22a2694b146e9632669f0"} Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.116670 4922 scope.go:117] "RemoveContainer" containerID="806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.152851 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c6dd6984b-wltr5"] Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.153850 4922 scope.go:117] "RemoveContainer" containerID="700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.161696 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5c6dd6984b-wltr5"] Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.192154 4922 scope.go:117] "RemoveContainer" containerID="806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6" Sep 29 22:47:01 crc kubenswrapper[4922]: E0929 22:47:01.192707 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6\": container with ID starting with 806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6 not found: ID does not exist" containerID="806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6" Sep 29 
22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.192782 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6"} err="failed to get container status \"806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6\": rpc error: code = NotFound desc = could not find container \"806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6\": container with ID starting with 806ae8576c639da51fa967079c17fbe75fb17f4a8646a41b284e397a0ea33bf6 not found: ID does not exist" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.192826 4922 scope.go:117] "RemoveContainer" containerID="700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6" Sep 29 22:47:01 crc kubenswrapper[4922]: E0929 22:47:01.193227 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6\": container with ID starting with 700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6 not found: ID does not exist" containerID="700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6" Sep 29 22:47:01 crc kubenswrapper[4922]: I0929 22:47:01.193279 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6"} err="failed to get container status \"700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6\": rpc error: code = NotFound desc = could not find container \"700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6\": container with ID starting with 700c776e575115aa121e52d6a88373f5176cca9b7d2185c0da915a9e888754a6 not found: ID does not exist" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.076207 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.127192 4922 generic.go:334] "Generic (PLEG): container finished" podID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerID="b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8" exitCode=0 Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.127231 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerDied","Data":"b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8"} Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.127259 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9eda432e-f3e4-4a47-ab28-7175f2e28034","Type":"ContainerDied","Data":"bbb9fcdd1a8a609ce08d760176b4cf9c83e327a574971912e69da294807031dc"} Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.127259 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.127276 4922 scope.go:117] "RemoveContainer" containerID="a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.149137 4922 scope.go:117] "RemoveContainer" containerID="b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.170425 4922 scope.go:117] "RemoveContainer" containerID="6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.178513 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-sg-core-conf-yaml\") pod \"9eda432e-f3e4-4a47-ab28-7175f2e28034\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.178585 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-combined-ca-bundle\") pod \"9eda432e-f3e4-4a47-ab28-7175f2e28034\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.178632 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2jzl\" (UniqueName: \"kubernetes.io/projected/9eda432e-f3e4-4a47-ab28-7175f2e28034-kube-api-access-v2jzl\") pod \"9eda432e-f3e4-4a47-ab28-7175f2e28034\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.178739 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-run-httpd\") pod \"9eda432e-f3e4-4a47-ab28-7175f2e28034\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.178818 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-log-httpd\") pod \"9eda432e-f3e4-4a47-ab28-7175f2e28034\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.178877 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-scripts\") pod \"9eda432e-f3e4-4a47-ab28-7175f2e28034\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.178974 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-config-data\") pod \"9eda432e-f3e4-4a47-ab28-7175f2e28034\" (UID: \"9eda432e-f3e4-4a47-ab28-7175f2e28034\") " Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.179727 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9eda432e-f3e4-4a47-ab28-7175f2e28034" (UID: "9eda432e-f3e4-4a47-ab28-7175f2e28034"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.179806 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9eda432e-f3e4-4a47-ab28-7175f2e28034" (UID: "9eda432e-f3e4-4a47-ab28-7175f2e28034"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.183956 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-scripts" (OuterVolumeSpecName: "scripts") pod "9eda432e-f3e4-4a47-ab28-7175f2e28034" (UID: "9eda432e-f3e4-4a47-ab28-7175f2e28034"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.201684 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eda432e-f3e4-4a47-ab28-7175f2e28034-kube-api-access-v2jzl" (OuterVolumeSpecName: "kube-api-access-v2jzl") pod "9eda432e-f3e4-4a47-ab28-7175f2e28034" (UID: "9eda432e-f3e4-4a47-ab28-7175f2e28034"). InnerVolumeSpecName "kube-api-access-v2jzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.202677 4922 scope.go:117] "RemoveContainer" containerID="a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2" Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.206947 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2\": container with ID starting with a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2 not found: ID does not exist" containerID="a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.207278 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2"} err="failed to get container status \"a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2\": rpc error: code = NotFound desc = could not find container \"a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2\": container with ID starting with a5877fbce1afd14ebcbec0ea2cdbdc8c628516ea4579e14ff9a4f525a9bc16e2 not found: ID does not exist" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.207471 4922 scope.go:117] "RemoveContainer" containerID="b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8" Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.208019 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8\": container with ID starting with b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8 not found: ID does not exist" containerID="b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.208071 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8"} err="failed to get container status 
\"b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8\": rpc error: code = NotFound desc = could not find container \"b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8\": container with ID starting with b5a0e5e8bdcb4e5c9aff417e00f605fdd0e427abafe43ee77abf093ba6a4b3a8 not found: ID does not exist" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.208101 4922 scope.go:117] "RemoveContainer" containerID="6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc" Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.208448 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc\": container with ID starting with 6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc not found: ID does not exist" containerID="6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.208490 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc"} err="failed to get container status \"6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc\": rpc error: code = NotFound desc = could not find container \"6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc\": container with ID starting with 6331fc625f4c8402407b8e2842a83b0cd3aa13742cea946dc1b0fead979f97cc not found: ID does not exist" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.215539 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9eda432e-f3e4-4a47-ab28-7175f2e28034" (UID: "9eda432e-f3e4-4a47-ab28-7175f2e28034"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.255975 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9eda432e-f3e4-4a47-ab28-7175f2e28034" (UID: "9eda432e-f3e4-4a47-ab28-7175f2e28034"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.280913 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.280960 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2jzl\" (UniqueName: \"kubernetes.io/projected/9eda432e-f3e4-4a47-ab28-7175f2e28034-kube-api-access-v2jzl\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.280974 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.280986 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9eda432e-f3e4-4a47-ab28-7175f2e28034-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.280997 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.281011 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.287973 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-config-data" (OuterVolumeSpecName: "config-data") pod "9eda432e-f3e4-4a47-ab28-7175f2e28034" (UID: "9eda432e-f3e4-4a47-ab28-7175f2e28034"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.384084 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9eda432e-f3e4-4a47-ab28-7175f2e28034-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.432914 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" path="/var/lib/kubelet/pods/e5f7e18e-208b-434c-a4ce-d2ab0ce15146/volumes" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.543956 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.554947 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.573202 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.573909 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api-log" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.574039 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api-log" Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.574103 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-notification-agent" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.574161 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-notification-agent" Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.574220 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-central-agent" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.574272 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-central-agent" Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.574332 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="proxy-httpd" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.574384 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="proxy-httpd" Sep 29 22:47:02 crc kubenswrapper[4922]: E0929 22:47:02.584204 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.584388 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.584817 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api-log" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.584902 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-notification-agent" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.584979 4922 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="proxy-httpd" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.585061 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" containerName="ceilometer-central-agent" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.585119 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f7e18e-208b-434c-a4ce-d2ab0ce15146" containerName="barbican-api" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.587021 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.590373 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.593468 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.605773 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.688470 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.688555 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-log-httpd\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.688600 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-run-httpd\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.688625 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-scripts\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.688763 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shbcq\" (UniqueName: \"kubernetes.io/projected/316ae59b-05c9-4efe-8447-1050e62719f6-kube-api-access-shbcq\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.688854 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.688915 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-config-data\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.790747 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791095 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-config-data\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791124 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791189 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-log-httpd\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791236 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-run-httpd\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791262 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-scripts\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791308 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shbcq\" (UniqueName: \"kubernetes.io/projected/316ae59b-05c9-4efe-8447-1050e62719f6-kube-api-access-shbcq\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791607 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-log-httpd\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.791653 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-run-httpd\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.796773 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-config-data\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.796907 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.797721 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.808845 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-scripts\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.812857 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shbcq\" (UniqueName: \"kubernetes.io/projected/316ae59b-05c9-4efe-8447-1050e62719f6-kube-api-access-shbcq\") pod \"ceilometer-0\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " pod="openstack/ceilometer-0" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.862738 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:47:02 crc kubenswrapper[4922]: I0929 22:47:02.920399 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.211586 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.265924 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-pclvw"] Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.266167 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" podUID="41540d9c-6693-4f54-b2c4-827a507918b1" containerName="dnsmasq-dns" containerID="cri-o://2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141" gracePeriod=10 Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.449809 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.676809 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.767729 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.916541 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdpbf\" (UniqueName: \"kubernetes.io/projected/41540d9c-6693-4f54-b2c4-827a507918b1-kube-api-access-jdpbf\") pod \"41540d9c-6693-4f54-b2c4-827a507918b1\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.916605 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-nb\") pod \"41540d9c-6693-4f54-b2c4-827a507918b1\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.916642 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-svc\") pod \"41540d9c-6693-4f54-b2c4-827a507918b1\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.916688 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-sb\") pod \"41540d9c-6693-4f54-b2c4-827a507918b1\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.916822 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-config\") pod \"41540d9c-6693-4f54-b2c4-827a507918b1\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.916868 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-swift-storage-0\") pod \"41540d9c-6693-4f54-b2c4-827a507918b1\" (UID: \"41540d9c-6693-4f54-b2c4-827a507918b1\") " Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.921715 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41540d9c-6693-4f54-b2c4-827a507918b1-kube-api-access-jdpbf" (OuterVolumeSpecName: "kube-api-access-jdpbf") pod "41540d9c-6693-4f54-b2c4-827a507918b1" (UID: "41540d9c-6693-4f54-b2c4-827a507918b1"). InnerVolumeSpecName "kube-api-access-jdpbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.965621 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "41540d9c-6693-4f54-b2c4-827a507918b1" (UID: "41540d9c-6693-4f54-b2c4-827a507918b1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.965996 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-config" (OuterVolumeSpecName: "config") pod "41540d9c-6693-4f54-b2c4-827a507918b1" (UID: "41540d9c-6693-4f54-b2c4-827a507918b1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.968924 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "41540d9c-6693-4f54-b2c4-827a507918b1" (UID: "41540d9c-6693-4f54-b2c4-827a507918b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.971898 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "41540d9c-6693-4f54-b2c4-827a507918b1" (UID: "41540d9c-6693-4f54-b2c4-827a507918b1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:03 crc kubenswrapper[4922]: I0929 22:47:03.984861 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "41540d9c-6693-4f54-b2c4-827a507918b1" (UID: "41540d9c-6693-4f54-b2c4-827a507918b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.018265 4922 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.018291 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdpbf\" (UniqueName: \"kubernetes.io/projected/41540d9c-6693-4f54-b2c4-827a507918b1-kube-api-access-jdpbf\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.018302 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.018311 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.018320 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.018331 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41540d9c-6693-4f54-b2c4-827a507918b1-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.165507 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerStarted","Data":"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44"} Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.165845 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerStarted","Data":"312d34ebda3f0856a10f278d559c7b80ccc464fd4ef05c3ec55a3fa6cc87cf63"} Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.167568 4922 generic.go:334] "Generic (PLEG): container finished" podID="43650a1d-3702-40e1-b4ef-2cc2f2343c28" containerID="8003474609ae65034e3d0969aa7d08f7334d96674fed267f7823a071e990171f" exitCode=0 Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.167638 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-782mg" event={"ID":"43650a1d-3702-40e1-b4ef-2cc2f2343c28","Type":"ContainerDied","Data":"8003474609ae65034e3d0969aa7d08f7334d96674fed267f7823a071e990171f"} Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.171237 4922 generic.go:334] "Generic (PLEG): container finished" podID="41540d9c-6693-4f54-b2c4-827a507918b1" containerID="2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141" exitCode=0 Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.171305 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" event={"ID":"41540d9c-6693-4f54-b2c4-827a507918b1","Type":"ContainerDied","Data":"2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141"} Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.171348 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" event={"ID":"41540d9c-6693-4f54-b2c4-827a507918b1","Type":"ContainerDied","Data":"fc1ca8d792a73619a0707b10207cee9423fb4323322f46cbd10cdd8c08e7e4a0"} Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.171378 4922 scope.go:117] "RemoveContainer" containerID="2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.171425 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-pclvw" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.213242 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-pclvw"] Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.213838 4922 scope.go:117] "RemoveContainer" containerID="bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.232532 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-pclvw"] Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.241677 4922 scope.go:117] "RemoveContainer" containerID="2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141" Sep 29 22:47:04 crc kubenswrapper[4922]: E0929 22:47:04.242509 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141\": container with ID starting with 2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141 not found: ID does not exist" containerID="2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.242543 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141"} err="failed to get container status \"2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141\": rpc error: code = NotFound desc = could not find container \"2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141\": container with ID starting with 2d8069797d4df81aa031ee36d576a5891153b0dc56fd4a28750895378c48a141 not found: ID does not exist" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.242565 4922 scope.go:117] "RemoveContainer" containerID="bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc" Sep 29 22:47:04 crc kubenswrapper[4922]: E0929 22:47:04.242953 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc\": container with ID starting with bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc not found: ID does not exist" containerID="bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.243075 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc"} err="failed to get container status \"bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc\": rpc error: code = NotFound desc = could not find container \"bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc\": container with ID starting with bdc426066fbf468443679195960457f925443a9d8111779877cafa288a6985dc not found: ID does not exist" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.435120 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41540d9c-6693-4f54-b2c4-827a507918b1" path="/var/lib/kubelet/pods/41540d9c-6693-4f54-b2c4-827a507918b1/volumes" Sep 29 22:47:04 crc kubenswrapper[4922]: I0929 22:47:04.436094 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eda432e-f3e4-4a47-ab28-7175f2e28034" path="/var/lib/kubelet/pods/9eda432e-f3e4-4a47-ab28-7175f2e28034/volumes" Sep 
29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.181484 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerStarted","Data":"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f"} Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.587887 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-782mg" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.760925 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qrwf\" (UniqueName: \"kubernetes.io/projected/43650a1d-3702-40e1-b4ef-2cc2f2343c28-kube-api-access-2qrwf\") pod \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.761034 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-scripts\") pod \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.761060 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-config-data\") pod \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.761139 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-db-sync-config-data\") pod \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.761163 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-combined-ca-bundle\") pod \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.761229 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/43650a1d-3702-40e1-b4ef-2cc2f2343c28-etc-machine-id\") pod \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\" (UID: \"43650a1d-3702-40e1-b4ef-2cc2f2343c28\") " Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.761658 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43650a1d-3702-40e1-b4ef-2cc2f2343c28-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "43650a1d-3702-40e1-b4ef-2cc2f2343c28" (UID: "43650a1d-3702-40e1-b4ef-2cc2f2343c28"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.770533 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "43650a1d-3702-40e1-b4ef-2cc2f2343c28" (UID: "43650a1d-3702-40e1-b4ef-2cc2f2343c28"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.771624 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43650a1d-3702-40e1-b4ef-2cc2f2343c28-kube-api-access-2qrwf" (OuterVolumeSpecName: "kube-api-access-2qrwf") pod "43650a1d-3702-40e1-b4ef-2cc2f2343c28" (UID: "43650a1d-3702-40e1-b4ef-2cc2f2343c28"). InnerVolumeSpecName "kube-api-access-2qrwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.780598 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-scripts" (OuterVolumeSpecName: "scripts") pod "43650a1d-3702-40e1-b4ef-2cc2f2343c28" (UID: "43650a1d-3702-40e1-b4ef-2cc2f2343c28"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.817621 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43650a1d-3702-40e1-b4ef-2cc2f2343c28" (UID: "43650a1d-3702-40e1-b4ef-2cc2f2343c28"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.862938 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.862972 4922 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.862986 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.862998 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/43650a1d-3702-40e1-b4ef-2cc2f2343c28-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.863010 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qrwf\" (UniqueName: \"kubernetes.io/projected/43650a1d-3702-40e1-b4ef-2cc2f2343c28-kube-api-access-2qrwf\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.875540 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-config-data" (OuterVolumeSpecName: "config-data") pod "43650a1d-3702-40e1-b4ef-2cc2f2343c28" (UID: "43650a1d-3702-40e1-b4ef-2cc2f2343c28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:05 crc kubenswrapper[4922]: I0929 22:47:05.964985 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43650a1d-3702-40e1-b4ef-2cc2f2343c28-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.191676 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-782mg" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.191668 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-782mg" event={"ID":"43650a1d-3702-40e1-b4ef-2cc2f2343c28","Type":"ContainerDied","Data":"c0233aed02eb9eaf3463aae814d16dd6841a797e296be52cbb760cae003ccfd3"} Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.192099 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0233aed02eb9eaf3463aae814d16dd6841a797e296be52cbb760cae003ccfd3" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.194009 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerStarted","Data":"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec"} Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.498077 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-njkfr"] Sep 29 22:47:06 crc kubenswrapper[4922]: E0929 22:47:06.498459 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41540d9c-6693-4f54-b2c4-827a507918b1" containerName="init" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.498471 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="41540d9c-6693-4f54-b2c4-827a507918b1" containerName="init" Sep 29 22:47:06 crc kubenswrapper[4922]: E0929 22:47:06.498482 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43650a1d-3702-40e1-b4ef-2cc2f2343c28" containerName="cinder-db-sync" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.498488 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="43650a1d-3702-40e1-b4ef-2cc2f2343c28" containerName="cinder-db-sync" Sep 29 22:47:06 crc kubenswrapper[4922]: E0929 22:47:06.498499 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41540d9c-6693-4f54-b2c4-827a507918b1" containerName="dnsmasq-dns" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.498504 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="41540d9c-6693-4f54-b2c4-827a507918b1" containerName="dnsmasq-dns" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.498655 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="41540d9c-6693-4f54-b2c4-827a507918b1" containerName="dnsmasq-dns" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.501719 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="43650a1d-3702-40e1-b4ef-2cc2f2343c28" containerName="cinder-db-sync" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.502788 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.518224 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.534083 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.544609 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zk68r" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.545071 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.554811 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-njkfr"] Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.555201 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.566644 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.624554 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680666 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680705 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680737 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680764 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680779 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-scripts\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680806 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tvzs\" (UniqueName: \"kubernetes.io/projected/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-kube-api-access-4tvzs\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680822 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-config\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680842 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680860 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680879 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680937 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6ps9\" (UniqueName: \"kubernetes.io/projected/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-kube-api-access-g6ps9\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.680990 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.708548 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.710151 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.712650 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.757221 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782568 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782609 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782635 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782660 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782676 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-scripts\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782701 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tvzs\" (UniqueName: \"kubernetes.io/projected/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-kube-api-access-4tvzs\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782722 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-config\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782741 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782759 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782777 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782831 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6ps9\" (UniqueName: \"kubernetes.io/projected/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-kube-api-access-g6ps9\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782882 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.782948 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.783783 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.784306 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.785208 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-config\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.785457 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.785877 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") 
" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.790264 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.800800 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-scripts\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.802593 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.805483 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6ps9\" (UniqueName: \"kubernetes.io/projected/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-kube-api-access-g6ps9\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.806678 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tvzs\" (UniqueName: \"kubernetes.io/projected/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-kube-api-access-4tvzs\") pod \"dnsmasq-dns-6bb4fc677f-njkfr\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.809984 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.840049 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.883925 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4b6c\" (UniqueName: \"kubernetes.io/projected/62c8d811-f867-4ebf-8a23-bc9958b082fe-kube-api-access-m4b6c\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.883964 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data-custom\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.883995 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-scripts\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.884041 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c8d811-f867-4ebf-8a23-bc9958b082fe-logs\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.884074 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.884197 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.884230 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62c8d811-f867-4ebf-8a23-bc9958b082fe-etc-machine-id\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.885478 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.985951 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4b6c\" (UniqueName: \"kubernetes.io/projected/62c8d811-f867-4ebf-8a23-bc9958b082fe-kube-api-access-m4b6c\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986195 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data-custom\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986226 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-scripts\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986271 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c8d811-f867-4ebf-8a23-bc9958b082fe-logs\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986305 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986345 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986375 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62c8d811-f867-4ebf-8a23-bc9958b082fe-etc-machine-id\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986464 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62c8d811-f867-4ebf-8a23-bc9958b082fe-etc-machine-id\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.986933 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c8d811-f867-4ebf-8a23-bc9958b082fe-logs\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.997581 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " 
pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.997851 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data-custom\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.998407 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:06 crc kubenswrapper[4922]: I0929 22:47:06.998754 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-scripts\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.013068 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4b6c\" (UniqueName: \"kubernetes.io/projected/62c8d811-f867-4ebf-8a23-bc9958b082fe-kube-api-access-m4b6c\") pod \"cinder-api-0\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " pod="openstack/cinder-api-0" Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.023252 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.219498 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerStarted","Data":"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3"} Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.219943 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.246278 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.91424052 podStartE2EDuration="5.246261248s" podCreationTimestamp="2025-09-29 22:47:02 +0000 UTC" firstStartedPulling="2025-09-29 22:47:03.467477557 +0000 UTC m=+1227.777766370" lastFinishedPulling="2025-09-29 22:47:06.799498285 +0000 UTC m=+1231.109787098" observedRunningTime="2025-09-29 22:47:07.237446629 +0000 UTC m=+1231.547735442" watchObservedRunningTime="2025-09-29 22:47:07.246261248 +0000 UTC m=+1231.556550061" Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.311305 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-njkfr"] Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.370959 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:07 crc kubenswrapper[4922]: I0929 22:47:07.558287 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.239836 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0","Type":"ContainerStarted","Data":"9aa4d561c6f6f8e056bc239a75e5f7dd3286b6abbf5f5fbbac4a30c376a48b82"} Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.242213 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"62c8d811-f867-4ebf-8a23-bc9958b082fe","Type":"ContainerStarted","Data":"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5"} Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.242238 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"62c8d811-f867-4ebf-8a23-bc9958b082fe","Type":"ContainerStarted","Data":"b55f696a20b1b6046fb4f8f7b5c381f3665bb8118297a2aca50c96bb4dbd0159"} Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.244732 4922 generic.go:334] "Generic (PLEG): container finished" podID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerID="6ebe0ac59859be244eb03a2e5ad1eb93a7f469e8ffaf1914c8e7baa6a65a0fec" exitCode=0 Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.245560 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" event={"ID":"9cf777ed-7cc9-4035-8064-34cfb8b5af7d","Type":"ContainerDied","Data":"6ebe0ac59859be244eb03a2e5ad1eb93a7f469e8ffaf1914c8e7baa6a65a0fec"} Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.245582 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" event={"ID":"9cf777ed-7cc9-4035-8064-34cfb8b5af7d","Type":"ContainerStarted","Data":"b43de1f36b88e11a998847ea123683f530f5261c3f9959929aeb2e340352c53f"} Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.578636 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.579824 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.583212 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.583263 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-9f4nd" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.583426 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.587754 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.722300 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.722356 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-combined-ca-bundle\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.722545 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffzjt\" (UniqueName: \"kubernetes.io/projected/25c9b137-8a15-477d-b87a-b4480c856551-kube-api-access-ffzjt\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc 
kubenswrapper[4922]: I0929 22:47:08.722696 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config-secret\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.824090 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffzjt\" (UniqueName: \"kubernetes.io/projected/25c9b137-8a15-477d-b87a-b4480c856551-kube-api-access-ffzjt\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.824412 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config-secret\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.824475 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.824519 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-combined-ca-bundle\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.825368 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.830459 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-combined-ca-bundle\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.831443 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config-secret\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.838493 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffzjt\" (UniqueName: \"kubernetes.io/projected/25c9b137-8a15-477d-b87a-b4480c856551-kube-api-access-ffzjt\") pod \"openstackclient\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " pod="openstack/openstackclient" Sep 29 22:47:08 crc kubenswrapper[4922]: I0929 22:47:08.900925 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.062080 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.258655 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" event={"ID":"9cf777ed-7cc9-4035-8064-34cfb8b5af7d","Type":"ContainerStarted","Data":"9a8238d677313fdeb8aeaabe1fa9e8a3c61bbbb36229e993ecda3a2986dd92bf"} Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.259736 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.260886 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0","Type":"ContainerStarted","Data":"02e9e68cd12d912f991f8675b0b90408a5423ec37e68999040cb666a8a3e2339"} Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.266876 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"62c8d811-f867-4ebf-8a23-bc9958b082fe","Type":"ContainerStarted","Data":"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c"} Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.266974 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="ceilometer-central-agent" containerID="cri-o://5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" gracePeriod=30 Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.267086 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="proxy-httpd" containerID="cri-o://add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" gracePeriod=30 Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.267125 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="sg-core" containerID="cri-o://cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" gracePeriod=30 Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.267155 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="ceilometer-notification-agent" containerID="cri-o://81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" gracePeriod=30 Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.305723 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" podStartSLOduration=3.30570499 podStartE2EDuration="3.30570499s" podCreationTimestamp="2025-09-29 22:47:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:47:09.285100569 +0000 UTC m=+1233.595389382" watchObservedRunningTime="2025-09-29 22:47:09.30570499 +0000 UTC m=+1233.615993803" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.312502 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.312489718 podStartE2EDuration="3.312489718s" podCreationTimestamp="2025-09-29 22:47:06 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:47:09.306193012 +0000 UTC m=+1233.616481825" watchObservedRunningTime="2025-09-29 22:47:09.312489718 +0000 UTC m=+1233.622778531" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.485141 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.862138 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5fc58fc6cf-b5wq2"] Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.863782 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.866076 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.866291 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.866422 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.874025 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5fc58fc6cf-b5wq2"] Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948469 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-run-httpd\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948527 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-log-httpd\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948584 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skjhx\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-kube-api-access-skjhx\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948619 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-internal-tls-certs\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948670 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-config-data\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948688 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-etc-swift\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948702 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-public-tls-certs\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:09 crc kubenswrapper[4922]: I0929 22:47:09.948734 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-combined-ca-bundle\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050651 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-config-data\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050730 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-etc-swift\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050752 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-public-tls-certs\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050789 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-combined-ca-bundle\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050820 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-run-httpd\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050846 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-log-httpd\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050894 4922 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-skjhx\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-kube-api-access-skjhx\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.050929 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-internal-tls-certs\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.052091 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-log-httpd\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.052317 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-run-httpd\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.063789 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-config-data\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.065982 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-combined-ca-bundle\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.066075 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-etc-swift\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.066101 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-public-tls-certs\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.066766 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-internal-tls-certs\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.070856 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skjhx\" (UniqueName: 
\"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-kube-api-access-skjhx\") pod \"swift-proxy-5fc58fc6cf-b5wq2\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.143914 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.201902 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.279763 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.287783 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"25c9b137-8a15-477d-b87a-b4480c856551","Type":"ContainerStarted","Data":"93dc871d18011a5dcf0efe0438f4577bd2c7e575ba725629998dafa24e191b3f"} Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.292050 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0","Type":"ContainerStarted","Data":"f9b7b248b12398045d4d9c76d0495a3026675be48141a60602d16e0e54ed8b37"} Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.303923 4922 generic.go:334] "Generic (PLEG): container finished" podID="316ae59b-05c9-4efe-8447-1050e62719f6" containerID="add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" exitCode=0 Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.303968 4922 generic.go:334] "Generic (PLEG): container finished" podID="316ae59b-05c9-4efe-8447-1050e62719f6" containerID="cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" exitCode=2 Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.303975 4922 generic.go:334] "Generic (PLEG): container finished" podID="316ae59b-05c9-4efe-8447-1050e62719f6" containerID="81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" exitCode=0 Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.303982 4922 generic.go:334] "Generic (PLEG): container finished" podID="316ae59b-05c9-4efe-8447-1050e62719f6" containerID="5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" exitCode=0 Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.304684 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.304795 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerDied","Data":"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3"} Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.304821 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerDied","Data":"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec"} Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.304832 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerDied","Data":"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f"} Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.304842 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerDied","Data":"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44"} Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.304850 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"316ae59b-05c9-4efe-8447-1050e62719f6","Type":"ContainerDied","Data":"312d34ebda3f0856a10f278d559c7b80ccc464fd4ef05c3ec55a3fa6cc87cf63"} Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.304864 4922 scope.go:117] "RemoveContainer" containerID="add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.305044 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.355674 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.506768692 podStartE2EDuration="4.355655101s" podCreationTimestamp="2025-09-29 22:47:06 +0000 UTC" firstStartedPulling="2025-09-29 22:47:07.376691531 +0000 UTC m=+1231.686980344" lastFinishedPulling="2025-09-29 22:47:08.22557794 +0000 UTC m=+1232.535866753" observedRunningTime="2025-09-29 22:47:10.348282468 +0000 UTC m=+1234.658571271" watchObservedRunningTime="2025-09-29 22:47:10.355655101 +0000 UTC m=+1234.665943904" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.369665 4922 scope.go:117] "RemoveContainer" containerID="cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.398763 4922 scope.go:117] "RemoveContainer" containerID="81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.437603 4922 scope.go:117] "RemoveContainer" containerID="5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.465477 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-run-httpd\") pod \"316ae59b-05c9-4efe-8447-1050e62719f6\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.465518 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-config-data\") pod \"316ae59b-05c9-4efe-8447-1050e62719f6\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.465665 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shbcq\" (UniqueName: \"kubernetes.io/projected/316ae59b-05c9-4efe-8447-1050e62719f6-kube-api-access-shbcq\") pod \"316ae59b-05c9-4efe-8447-1050e62719f6\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.465724 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-log-httpd\") pod \"316ae59b-05c9-4efe-8447-1050e62719f6\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.466344 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-scripts\") pod \"316ae59b-05c9-4efe-8447-1050e62719f6\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.466390 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-sg-core-conf-yaml\") pod \"316ae59b-05c9-4efe-8447-1050e62719f6\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.466442 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-combined-ca-bundle\") pod \"316ae59b-05c9-4efe-8447-1050e62719f6\" (UID: \"316ae59b-05c9-4efe-8447-1050e62719f6\") " Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.466752 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "316ae59b-05c9-4efe-8447-1050e62719f6" (UID: "316ae59b-05c9-4efe-8447-1050e62719f6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.467207 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "316ae59b-05c9-4efe-8447-1050e62719f6" (UID: "316ae59b-05c9-4efe-8447-1050e62719f6"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.473643 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.473833 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/316ae59b-05c9-4efe-8447-1050e62719f6-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.479165 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/316ae59b-05c9-4efe-8447-1050e62719f6-kube-api-access-shbcq" (OuterVolumeSpecName: "kube-api-access-shbcq") pod "316ae59b-05c9-4efe-8447-1050e62719f6" (UID: "316ae59b-05c9-4efe-8447-1050e62719f6"). InnerVolumeSpecName "kube-api-access-shbcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.493005 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-scripts" (OuterVolumeSpecName: "scripts") pod "316ae59b-05c9-4efe-8447-1050e62719f6" (UID: "316ae59b-05c9-4efe-8447-1050e62719f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.526928 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "316ae59b-05c9-4efe-8447-1050e62719f6" (UID: "316ae59b-05c9-4efe-8447-1050e62719f6"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.575775 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shbcq\" (UniqueName: \"kubernetes.io/projected/316ae59b-05c9-4efe-8447-1050e62719f6-kube-api-access-shbcq\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.576087 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.576097 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.584448 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "316ae59b-05c9-4efe-8447-1050e62719f6" (UID: "316ae59b-05c9-4efe-8447-1050e62719f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.614990 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-config-data" (OuterVolumeSpecName: "config-data") pod "316ae59b-05c9-4efe-8447-1050e62719f6" (UID: "316ae59b-05c9-4efe-8447-1050e62719f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.659311 4922 scope.go:117] "RemoveContainer" containerID="add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.660020 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": container with ID starting with add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3 not found: ID does not exist" containerID="add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.660048 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3"} err="failed to get container status \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": rpc error: code = NotFound desc = could not find container \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": container with ID starting with add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.660074 4922 scope.go:117] "RemoveContainer" containerID="cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.660382 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": container with ID starting with cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec not found: ID does not exist" containerID="cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.660415 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec"} err="failed to get container status \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": rpc error: code = NotFound desc = could not find container \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": container with ID starting with cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.660427 4922 scope.go:117] "RemoveContainer" containerID="81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.661669 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": container with ID starting with 81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f not found: ID does not exist" containerID="81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.661698 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f"} err="failed to get container status \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": rpc error: code = NotFound desc = could not 
find container \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": container with ID starting with 81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.661714 4922 scope.go:117] "RemoveContainer" containerID="5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.661985 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": container with ID starting with 5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44 not found: ID does not exist" containerID="5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662003 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44"} err="failed to get container status \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": rpc error: code = NotFound desc = could not find container \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": container with ID starting with 5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662015 4922 scope.go:117] "RemoveContainer" containerID="add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662173 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3"} err="failed to get container status \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": rpc error: code = NotFound desc = could not find container \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": container with ID starting with add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662188 4922 scope.go:117] "RemoveContainer" containerID="cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662351 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec"} err="failed to get container status \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": rpc error: code = NotFound desc = could not find container \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": container with ID starting with cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662374 4922 scope.go:117] "RemoveContainer" containerID="81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662614 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f"} err="failed to get container status \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": rpc error: code = NotFound desc = could not 
find container \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": container with ID starting with 81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662631 4922 scope.go:117] "RemoveContainer" containerID="5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662774 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44"} err="failed to get container status \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": rpc error: code = NotFound desc = could not find container \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": container with ID starting with 5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662791 4922 scope.go:117] "RemoveContainer" containerID="add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662940 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3"} err="failed to get container status \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": rpc error: code = NotFound desc = could not find container \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": container with ID starting with add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.662956 4922 scope.go:117] "RemoveContainer" containerID="cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663204 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec"} err="failed to get container status \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": rpc error: code = NotFound desc = could not find container \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": container with ID starting with cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663227 4922 scope.go:117] "RemoveContainer" containerID="81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663514 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f"} err="failed to get container status \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": rpc error: code = NotFound desc = could not find container \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": container with ID starting with 81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663540 4922 scope.go:117] "RemoveContainer" containerID="5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663703 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44"} err="failed to get container status \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": rpc error: code = NotFound desc = could not find container \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": container with ID starting with 5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663720 4922 scope.go:117] "RemoveContainer" containerID="add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663848 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3"} err="failed to get container status \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": rpc error: code = NotFound desc = could not find container \"add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3\": container with ID starting with add423ab3885a1c965b87516a8c5138e2efb8cc214e17319b9a85b006984fbc3 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663862 4922 scope.go:117] "RemoveContainer" containerID="cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.663989 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec"} err="failed to get container status \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": rpc error: code = NotFound desc = could not find container \"cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec\": container with ID starting with cb6cc9eb78f3396f63e78221fa20779bba8826e2e7f781050321f39b8181d9ec not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.664011 4922 scope.go:117] "RemoveContainer" containerID="81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.664150 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f"} err="failed to get container status \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": rpc error: code = NotFound desc = could not find container \"81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f\": container with ID starting with 81f68dc31a5ac00dc41d95c0785768cf5ae33d94415149826676e3e3bfa5472f not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.664164 4922 scope.go:117] "RemoveContainer" containerID="5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.664304 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44"} err="failed to get container status \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": rpc error: code = NotFound desc = could not find container \"5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44\": container with ID starting with 
5c9cc32a89ff3b3bdb0dabe9bc16da91b24241c31928e45a62e9cb6ad83b7a44 not found: ID does not exist" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.678318 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.678354 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/316ae59b-05c9-4efe-8447-1050e62719f6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.786427 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5fc58fc6cf-b5wq2"] Sep 29 22:47:10 crc kubenswrapper[4922]: W0929 22:47:10.804547 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6e07568_09d3_4a0f_a3b6_8b6df0f89cbb.slice/crio-b0be4d9f34ba48d67b765bde4ff298d050f698e4645dde38152948a53d2aeffe WatchSource:0}: Error finding container b0be4d9f34ba48d67b765bde4ff298d050f698e4645dde38152948a53d2aeffe: Status 404 returned error can't find the container with id b0be4d9f34ba48d67b765bde4ff298d050f698e4645dde38152948a53d2aeffe Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.965118 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.973639 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.984796 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.985286 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="proxy-httpd" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985308 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="proxy-httpd" Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.985330 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="ceilometer-notification-agent" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985341 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="ceilometer-notification-agent" Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.985358 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="sg-core" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985369 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="sg-core" Sep 29 22:47:10 crc kubenswrapper[4922]: E0929 22:47:10.985411 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="ceilometer-central-agent" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985420 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="ceilometer-central-agent" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985675 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" 
containerName="ceilometer-central-agent" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985707 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="proxy-httpd" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985726 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="sg-core" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.985775 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" containerName="ceilometer-notification-agent" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.987967 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.992121 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.993150 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:47:10 crc kubenswrapper[4922]: I0929 22:47:10.993269 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.088709 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-log-httpd\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.088778 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.088812 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-run-httpd\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.088893 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-config-data\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.088921 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.088979 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtqzl\" (UniqueName: \"kubernetes.io/projected/8351f6d6-d2ef-43bd-91de-181c48f712ac-kube-api-access-xtqzl\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 
22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.089048 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-scripts\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.190627 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtqzl\" (UniqueName: \"kubernetes.io/projected/8351f6d6-d2ef-43bd-91de-181c48f712ac-kube-api-access-xtqzl\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.190744 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-scripts\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.191036 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-log-httpd\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.191209 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.191241 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-run-httpd\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.191295 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-config-data\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.191320 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.191632 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-log-httpd\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.193364 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-run-httpd\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 
22:47:11.194869 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-config-data\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.195195 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-scripts\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.196085 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.196923 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.208507 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtqzl\" (UniqueName: \"kubernetes.io/projected/8351f6d6-d2ef-43bd-91de-181c48f712ac-kube-api-access-xtqzl\") pod \"ceilometer-0\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.320217 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.350476 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" event={"ID":"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb","Type":"ContainerStarted","Data":"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0"} Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.350540 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" event={"ID":"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb","Type":"ContainerStarted","Data":"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702"} Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.350555 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" event={"ID":"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb","Type":"ContainerStarted","Data":"b0be4d9f34ba48d67b765bde4ff298d050f698e4645dde38152948a53d2aeffe"} Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.360021 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api-log" containerID="cri-o://0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5" gracePeriod=30 Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.360354 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api" containerID="cri-o://e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c" gracePeriod=30 Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.360937 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.360967 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.391623 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" podStartSLOduration=2.391604286 podStartE2EDuration="2.391604286s" podCreationTimestamp="2025-09-29 22:47:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:47:11.389198497 +0000 UTC m=+1235.699487320" watchObservedRunningTime="2025-09-29 22:47:11.391604286 +0000 UTC m=+1235.701893099" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.886631 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 22:47:11 crc kubenswrapper[4922]: I0929 22:47:11.927332 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.040916 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.206140 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data\") pod \"62c8d811-f867-4ebf-8a23-bc9958b082fe\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.206243 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62c8d811-f867-4ebf-8a23-bc9958b082fe-etc-machine-id\") pod \"62c8d811-f867-4ebf-8a23-bc9958b082fe\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.206269 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-combined-ca-bundle\") pod \"62c8d811-f867-4ebf-8a23-bc9958b082fe\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.206345 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data-custom\") pod \"62c8d811-f867-4ebf-8a23-bc9958b082fe\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.206413 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-scripts\") pod \"62c8d811-f867-4ebf-8a23-bc9958b082fe\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.206449 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c8d811-f867-4ebf-8a23-bc9958b082fe-logs\") pod \"62c8d811-f867-4ebf-8a23-bc9958b082fe\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.206473 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4b6c\" (UniqueName: \"kubernetes.io/projected/62c8d811-f867-4ebf-8a23-bc9958b082fe-kube-api-access-m4b6c\") pod \"62c8d811-f867-4ebf-8a23-bc9958b082fe\" (UID: \"62c8d811-f867-4ebf-8a23-bc9958b082fe\") " Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.208169 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62c8d811-f867-4ebf-8a23-bc9958b082fe-logs" (OuterVolumeSpecName: "logs") pod "62c8d811-f867-4ebf-8a23-bc9958b082fe" (UID: "62c8d811-f867-4ebf-8a23-bc9958b082fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.209259 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/62c8d811-f867-4ebf-8a23-bc9958b082fe-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "62c8d811-f867-4ebf-8a23-bc9958b082fe" (UID: "62c8d811-f867-4ebf-8a23-bc9958b082fe"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.216955 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c8d811-f867-4ebf-8a23-bc9958b082fe-kube-api-access-m4b6c" (OuterVolumeSpecName: "kube-api-access-m4b6c") pod "62c8d811-f867-4ebf-8a23-bc9958b082fe" (UID: "62c8d811-f867-4ebf-8a23-bc9958b082fe"). InnerVolumeSpecName "kube-api-access-m4b6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.217134 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "62c8d811-f867-4ebf-8a23-bc9958b082fe" (UID: "62c8d811-f867-4ebf-8a23-bc9958b082fe"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.218013 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-scripts" (OuterVolumeSpecName: "scripts") pod "62c8d811-f867-4ebf-8a23-bc9958b082fe" (UID: "62c8d811-f867-4ebf-8a23-bc9958b082fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.256277 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62c8d811-f867-4ebf-8a23-bc9958b082fe" (UID: "62c8d811-f867-4ebf-8a23-bc9958b082fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.283557 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data" (OuterVolumeSpecName: "config-data") pod "62c8d811-f867-4ebf-8a23-bc9958b082fe" (UID: "62c8d811-f867-4ebf-8a23-bc9958b082fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.308657 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4b6c\" (UniqueName: \"kubernetes.io/projected/62c8d811-f867-4ebf-8a23-bc9958b082fe-kube-api-access-m4b6c\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.308688 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.308699 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62c8d811-f867-4ebf-8a23-bc9958b082fe-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.308707 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.308719 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.308728 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62c8d811-f867-4ebf-8a23-bc9958b082fe-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.308736 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62c8d811-f867-4ebf-8a23-bc9958b082fe-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.372187 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerStarted","Data":"af198c0c1ac22ce178a8f907048bb448a6d9af3779a2161d6e4ac6a062cfc7fe"} Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.374498 4922 generic.go:334] "Generic (PLEG): container finished" podID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerID="e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c" exitCode=0 Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.374527 4922 generic.go:334] "Generic (PLEG): container finished" podID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerID="0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5" exitCode=143 Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.374723 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"62c8d811-f867-4ebf-8a23-bc9958b082fe","Type":"ContainerDied","Data":"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c"} Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.374767 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"62c8d811-f867-4ebf-8a23-bc9958b082fe","Type":"ContainerDied","Data":"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5"} Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.374787 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"62c8d811-f867-4ebf-8a23-bc9958b082fe","Type":"ContainerDied","Data":"b55f696a20b1b6046fb4f8f7b5c381f3665bb8118297a2aca50c96bb4dbd0159"} Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.374808 4922 scope.go:117] "RemoveContainer" containerID="e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.374734 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.408757 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.420692 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.420830 4922 scope.go:117] "RemoveContainer" containerID="0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.434174 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="316ae59b-05c9-4efe-8447-1050e62719f6" path="/var/lib/kubelet/pods/316ae59b-05c9-4efe-8447-1050e62719f6/volumes" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.435223 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" path="/var/lib/kubelet/pods/62c8d811-f867-4ebf-8a23-bc9958b082fe/volumes" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.448718 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:12 crc kubenswrapper[4922]: E0929 22:47:12.449268 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api-log" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.449329 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api-log" Sep 29 22:47:12 crc kubenswrapper[4922]: E0929 22:47:12.449424 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.449476 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.449714 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.449773 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c8d811-f867-4ebf-8a23-bc9958b082fe" containerName="cinder-api-log" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.456540 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.463806 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.471363 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.471572 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.471719 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.562541 4922 scope.go:117] "RemoveContainer" containerID="e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c" Sep 29 22:47:12 crc kubenswrapper[4922]: E0929 22:47:12.566479 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c\": container with ID starting with e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c not found: ID does not exist" containerID="e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.566513 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c"} err="failed to get container status \"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c\": rpc error: code = NotFound desc = could not find container \"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c\": container with ID starting with e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c not found: ID does not exist" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.566536 4922 scope.go:117] "RemoveContainer" containerID="0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5" Sep 29 22:47:12 crc kubenswrapper[4922]: E0929 22:47:12.569132 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5\": container with ID starting with 0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5 not found: ID does not exist" containerID="0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.569159 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5"} err="failed to get container status \"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5\": rpc error: code = NotFound desc = could not find container \"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5\": container with ID starting with 0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5 not found: ID does not exist" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.569190 4922 scope.go:117] "RemoveContainer" containerID="e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.573729 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c"} err="failed to get container status \"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c\": rpc error: code = NotFound desc = could not find container \"e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c\": container with ID starting with e62e80ef24b1b3e84d6b5a806366dc772a188829f557e068c563938d1c39415c not found: ID does not exist" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.573767 4922 scope.go:117] "RemoveContainer" containerID="0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.575533 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5"} err="failed to get container status \"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5\": rpc error: code = NotFound desc = could not find container \"0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5\": container with ID starting with 0211553a60ede2f82826ef9f5d7762fe9e6737f9283173e29bf843c3845a22e5 not found: ID does not exist" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612291 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612362 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612384 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data-custom\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612432 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8788e59c-0cd3-43c5-8591-d452f9cb083a-logs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612474 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-scripts\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612518 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612557 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4frms\" (UniqueName: \"kubernetes.io/projected/8788e59c-0cd3-43c5-8591-d452f9cb083a-kube-api-access-4frms\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612573 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.612597 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8788e59c-0cd3-43c5-8591-d452f9cb083a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715122 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-scripts\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715195 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715243 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4frms\" (UniqueName: \"kubernetes.io/projected/8788e59c-0cd3-43c5-8591-d452f9cb083a-kube-api-access-4frms\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715264 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715293 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8788e59c-0cd3-43c5-8591-d452f9cb083a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715317 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715361 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " 
pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715381 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data-custom\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.715431 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8788e59c-0cd3-43c5-8591-d452f9cb083a-logs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.716429 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8788e59c-0cd3-43c5-8591-d452f9cb083a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.716455 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8788e59c-0cd3-43c5-8591-d452f9cb083a-logs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.719791 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.720572 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.723436 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.725956 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.734573 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-scripts\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.745832 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4frms\" (UniqueName: \"kubernetes.io/projected/8788e59c-0cd3-43c5-8591-d452f9cb083a-kube-api-access-4frms\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 
22:47:12.757988 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data-custom\") pod \"cinder-api-0\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " pod="openstack/cinder-api-0" Sep 29 22:47:12 crc kubenswrapper[4922]: I0929 22:47:12.883116 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:47:13 crc kubenswrapper[4922]: I0929 22:47:13.389011 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerStarted","Data":"764ff50f189b795358b51e2b6158737a340613e08dca8ab773b7a1a1e6b8364f"} Sep 29 22:47:13 crc kubenswrapper[4922]: I0929 22:47:13.404126 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:47:14 crc kubenswrapper[4922]: I0929 22:47:14.409738 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerStarted","Data":"be46172238842a43b868dab4364dec704602549bd52052875036c8c3d634f2fe"} Sep 29 22:47:14 crc kubenswrapper[4922]: I0929 22:47:14.410348 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerStarted","Data":"36819f432e851c3dfc1f8df455b40743869faa73839066167e6d65a786128da1"} Sep 29 22:47:14 crc kubenswrapper[4922]: I0929 22:47:14.412619 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8788e59c-0cd3-43c5-8591-d452f9cb083a","Type":"ContainerStarted","Data":"1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe"} Sep 29 22:47:14 crc kubenswrapper[4922]: I0929 22:47:14.412658 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8788e59c-0cd3-43c5-8591-d452f9cb083a","Type":"ContainerStarted","Data":"2265d26ef85dca95044cf74f9eb57eb6ea2e6da0b1454dc245a83a2f1f18cec9"} Sep 29 22:47:15 crc kubenswrapper[4922]: I0929 22:47:15.327558 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:15 crc kubenswrapper[4922]: I0929 22:47:15.443002 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8788e59c-0cd3-43c5-8591-d452f9cb083a","Type":"ContainerStarted","Data":"e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1"} Sep 29 22:47:15 crc kubenswrapper[4922]: I0929 22:47:15.444176 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 22:47:15 crc kubenswrapper[4922]: I0929 22:47:15.463066 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.463050093 podStartE2EDuration="3.463050093s" podCreationTimestamp="2025-09-29 22:47:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:47:15.460693075 +0000 UTC m=+1239.770981888" watchObservedRunningTime="2025-09-29 22:47:15.463050093 +0000 UTC m=+1239.773338906" Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.480882 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-central-agent" 
containerID="cri-o://764ff50f189b795358b51e2b6158737a340613e08dca8ab773b7a1a1e6b8364f" gracePeriod=30 Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.481304 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerStarted","Data":"5d13923b4927c8ebf0c78ca49bd3b5ff01153c8d1f4308adc56c02c33104c772"} Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.481336 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.481631 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="proxy-httpd" containerID="cri-o://5d13923b4927c8ebf0c78ca49bd3b5ff01153c8d1f4308adc56c02c33104c772" gracePeriod=30 Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.481669 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="sg-core" containerID="cri-o://be46172238842a43b868dab4364dec704602549bd52052875036c8c3d634f2fe" gracePeriod=30 Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.481700 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-notification-agent" containerID="cri-o://36819f432e851c3dfc1f8df455b40743869faa73839066167e6d65a786128da1" gracePeriod=30 Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.525997 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.904693907 podStartE2EDuration="6.525979967s" podCreationTimestamp="2025-09-29 22:47:10 +0000 UTC" firstStartedPulling="2025-09-29 22:47:11.939589028 +0000 UTC m=+1236.249877841" lastFinishedPulling="2025-09-29 22:47:15.560875088 +0000 UTC m=+1239.871163901" observedRunningTime="2025-09-29 22:47:16.521786873 +0000 UTC m=+1240.832075686" watchObservedRunningTime="2025-09-29 22:47:16.525979967 +0000 UTC m=+1240.836268780" Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.842557 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.907623 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-f5h89"] Sep 29 22:47:16 crc kubenswrapper[4922]: I0929 22:47:16.908435 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="dnsmasq-dns" containerID="cri-o://a98166b4f4d9f2098a6ade5bbf2f9d1adec475ca2386cdd6e37391dc6c2c2b26" gracePeriod=10 Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.188116 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.233584 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.492155 4922 generic.go:334] "Generic (PLEG): container finished" podID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerID="5d13923b4927c8ebf0c78ca49bd3b5ff01153c8d1f4308adc56c02c33104c772" exitCode=0 Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 
22:47:17.492193 4922 generic.go:334] "Generic (PLEG): container finished" podID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerID="be46172238842a43b868dab4364dec704602549bd52052875036c8c3d634f2fe" exitCode=2 Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.492206 4922 generic.go:334] "Generic (PLEG): container finished" podID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerID="36819f432e851c3dfc1f8df455b40743869faa73839066167e6d65a786128da1" exitCode=0 Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.492232 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerDied","Data":"5d13923b4927c8ebf0c78ca49bd3b5ff01153c8d1f4308adc56c02c33104c772"} Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.492281 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerDied","Data":"be46172238842a43b868dab4364dec704602549bd52052875036c8c3d634f2fe"} Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.492294 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerDied","Data":"36819f432e851c3dfc1f8df455b40743869faa73839066167e6d65a786128da1"} Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.494900 4922 generic.go:334] "Generic (PLEG): container finished" podID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerID="a98166b4f4d9f2098a6ade5bbf2f9d1adec475ca2386cdd6e37391dc6c2c2b26" exitCode=0 Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.495112 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="cinder-scheduler" containerID="cri-o://02e9e68cd12d912f991f8675b0b90408a5423ec37e68999040cb666a8a3e2339" gracePeriod=30 Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.495449 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" event={"ID":"934e0cb8-cc09-4c8d-9e74-938918b3fb3d","Type":"ContainerDied","Data":"a98166b4f4d9f2098a6ade5bbf2f9d1adec475ca2386cdd6e37391dc6c2c2b26"} Sep 29 22:47:17 crc kubenswrapper[4922]: I0929 22:47:17.496573 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="probe" containerID="cri-o://f9b7b248b12398045d4d9c76d0495a3026675be48141a60602d16e0e54ed8b37" gracePeriod=30 Sep 29 22:47:18 crc kubenswrapper[4922]: I0929 22:47:18.209332 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.155:5353: connect: connection refused" Sep 29 22:47:18 crc kubenswrapper[4922]: I0929 22:47:18.248610 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:47:18 crc kubenswrapper[4922]: I0929 22:47:18.507227 4922 generic.go:334] "Generic (PLEG): container finished" podID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerID="f9b7b248b12398045d4d9c76d0495a3026675be48141a60602d16e0e54ed8b37" exitCode=0 Sep 29 22:47:18 crc kubenswrapper[4922]: I0929 22:47:18.507582 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0","Type":"ContainerDied","Data":"f9b7b248b12398045d4d9c76d0495a3026675be48141a60602d16e0e54ed8b37"} Sep 29 22:47:19 crc kubenswrapper[4922]: I0929 22:47:19.519856 4922 generic.go:334] "Generic (PLEG): container finished" podID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerID="764ff50f189b795358b51e2b6158737a340613e08dca8ab773b7a1a1e6b8364f" exitCode=0 Sep 29 22:47:19 crc kubenswrapper[4922]: I0929 22:47:19.519895 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerDied","Data":"764ff50f189b795358b51e2b6158737a340613e08dca8ab773b7a1a1e6b8364f"} Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.212414 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.213862 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.533610 4922 generic.go:334] "Generic (PLEG): container finished" podID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerID="02e9e68cd12d912f991f8675b0b90408a5423ec37e68999040cb666a8a3e2339" exitCode=0 Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.533697 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0","Type":"ContainerDied","Data":"02e9e68cd12d912f991f8675b0b90408a5423ec37e68999040cb666a8a3e2339"} Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.582134 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-pl8xb"] Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.588357 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pl8xb" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.597383 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pl8xb"] Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.660497 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-rjc6l"] Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.661825 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rjc6l" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.662734 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh5fz\" (UniqueName: \"kubernetes.io/projected/d3f528ca-b48a-4e5d-b801-620778a59ec6-kube-api-access-jh5fz\") pod \"nova-api-db-create-pl8xb\" (UID: \"d3f528ca-b48a-4e5d-b801-620778a59ec6\") " pod="openstack/nova-api-db-create-pl8xb" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.670621 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rjc6l"] Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.760019 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-8cxz7"] Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.761266 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-8cxz7" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.763999 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh5fz\" (UniqueName: \"kubernetes.io/projected/d3f528ca-b48a-4e5d-b801-620778a59ec6-kube-api-access-jh5fz\") pod \"nova-api-db-create-pl8xb\" (UID: \"d3f528ca-b48a-4e5d-b801-620778a59ec6\") " pod="openstack/nova-api-db-create-pl8xb" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.764063 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck8pz\" (UniqueName: \"kubernetes.io/projected/df501d92-801d-4caa-8d1e-da48b45182cf-kube-api-access-ck8pz\") pod \"nova-cell0-db-create-rjc6l\" (UID: \"df501d92-801d-4caa-8d1e-da48b45182cf\") " pod="openstack/nova-cell0-db-create-rjc6l" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.771270 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8cxz7"] Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.799693 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.805956 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jh5fz\" (UniqueName: \"kubernetes.io/projected/d3f528ca-b48a-4e5d-b801-620778a59ec6-kube-api-access-jh5fz\") pod \"nova-api-db-create-pl8xb\" (UID: \"d3f528ca-b48a-4e5d-b801-620778a59ec6\") " pod="openstack/nova-api-db-create-pl8xb" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.852607 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-54dbcc9b8d-c5whm"] Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.855450 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-54dbcc9b8d-c5whm" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-api" containerID="cri-o://8ba9ff1c3b417ba93c5589b96fa732dc7aeb23ff636b44c44c80eb7b3db4a482" gracePeriod=30 Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.855814 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-54dbcc9b8d-c5whm" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-httpd" containerID="cri-o://96e9425ddd60a2bf1a86df0f8f8a3afe27afdd3a1fc0d9e04d0422fb498c8e47" gracePeriod=30 Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.866401 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck8pz\" (UniqueName: \"kubernetes.io/projected/df501d92-801d-4caa-8d1e-da48b45182cf-kube-api-access-ck8pz\") pod \"nova-cell0-db-create-rjc6l\" (UID: \"df501d92-801d-4caa-8d1e-da48b45182cf\") " pod="openstack/nova-cell0-db-create-rjc6l" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.866498 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8zjp\" (UniqueName: \"kubernetes.io/projected/755c37dc-48a9-4941-8410-1832fb4a78e8-kube-api-access-n8zjp\") pod \"nova-cell1-db-create-8cxz7\" (UID: \"755c37dc-48a9-4941-8410-1832fb4a78e8\") " pod="openstack/nova-cell1-db-create-8cxz7" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.884579 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck8pz\" (UniqueName: 
\"kubernetes.io/projected/df501d92-801d-4caa-8d1e-da48b45182cf-kube-api-access-ck8pz\") pod \"nova-cell0-db-create-rjc6l\" (UID: \"df501d92-801d-4caa-8d1e-da48b45182cf\") " pod="openstack/nova-cell0-db-create-rjc6l" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.922207 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-pl8xb" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.968412 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8zjp\" (UniqueName: \"kubernetes.io/projected/755c37dc-48a9-4941-8410-1832fb4a78e8-kube-api-access-n8zjp\") pod \"nova-cell1-db-create-8cxz7\" (UID: \"755c37dc-48a9-4941-8410-1832fb4a78e8\") " pod="openstack/nova-cell1-db-create-8cxz7" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.983441 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8zjp\" (UniqueName: \"kubernetes.io/projected/755c37dc-48a9-4941-8410-1832fb4a78e8-kube-api-access-n8zjp\") pod \"nova-cell1-db-create-8cxz7\" (UID: \"755c37dc-48a9-4941-8410-1832fb4a78e8\") " pod="openstack/nova-cell1-db-create-8cxz7" Sep 29 22:47:20 crc kubenswrapper[4922]: I0929 22:47:20.985399 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rjc6l" Sep 29 22:47:21 crc kubenswrapper[4922]: I0929 22:47:21.086856 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8cxz7" Sep 29 22:47:21 crc kubenswrapper[4922]: I0929 22:47:21.545734 4922 generic.go:334] "Generic (PLEG): container finished" podID="c2331dae-0582-4728-8c1a-304d087ccf91" containerID="96e9425ddd60a2bf1a86df0f8f8a3afe27afdd3a1fc0d9e04d0422fb498c8e47" exitCode=0 Sep 29 22:47:21 crc kubenswrapper[4922]: I0929 22:47:21.545784 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54dbcc9b8d-c5whm" event={"ID":"c2331dae-0582-4728-8c1a-304d087ccf91","Type":"ContainerDied","Data":"96e9425ddd60a2bf1a86df0f8f8a3afe27afdd3a1fc0d9e04d0422fb498c8e47"} Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.017083 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.115271 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-combined-ca-bundle\") pod \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.115787 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data\") pod \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.115902 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6ps9\" (UniqueName: \"kubernetes.io/projected/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-kube-api-access-g6ps9\") pod \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.115992 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-etc-machine-id\") pod \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.116768 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-scripts\") pod \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.117009 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data-custom\") pod \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\" (UID: \"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.116073 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" (UID: "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.118069 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.125679 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-kube-api-access-g6ps9" (OuterVolumeSpecName: "kube-api-access-g6ps9") pod "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" (UID: "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0"). InnerVolumeSpecName "kube-api-access-g6ps9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.128792 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-scripts" (OuterVolumeSpecName: "scripts") pod "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" (UID: "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.129166 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" (UID: "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.219962 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.219987 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6ps9\" (UniqueName: \"kubernetes.io/projected/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-kube-api-access-g6ps9\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.219997 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.237579 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" (UID: "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.249453 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data" (OuterVolumeSpecName: "config-data") pod "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" (UID: "afdd92a5-3c71-42f2-9f4e-8aca550d6bd0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.273436 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.274655 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.321172 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-swift-storage-0\") pod \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.321248 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-scripts\") pod \"8351f6d6-d2ef-43bd-91de-181c48f712ac\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.321271 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-run-httpd\") pod \"8351f6d6-d2ef-43bd-91de-181c48f712ac\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.321340 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtqzl\" (UniqueName: \"kubernetes.io/projected/8351f6d6-d2ef-43bd-91de-181c48f712ac-kube-api-access-xtqzl\") pod \"8351f6d6-d2ef-43bd-91de-181c48f712ac\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.321486 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-config-data\") pod \"8351f6d6-d2ef-43bd-91de-181c48f712ac\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.324730 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-nb\") pod \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.324775 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmzsc\" (UniqueName: \"kubernetes.io/projected/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-kube-api-access-zmzsc\") pod \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.324818 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-svc\") pod \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.324833 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-config\") pod \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.324853 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-log-httpd\") pod \"8351f6d6-d2ef-43bd-91de-181c48f712ac\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") 
" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.324894 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-combined-ca-bundle\") pod \"8351f6d6-d2ef-43bd-91de-181c48f712ac\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.324960 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-sb\") pod \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\" (UID: \"934e0cb8-cc09-4c8d-9e74-938918b3fb3d\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.325002 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-sg-core-conf-yaml\") pod \"8351f6d6-d2ef-43bd-91de-181c48f712ac\" (UID: \"8351f6d6-d2ef-43bd-91de-181c48f712ac\") " Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.325385 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.325410 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.330366 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-kube-api-access-zmzsc" (OuterVolumeSpecName: "kube-api-access-zmzsc") pod "934e0cb8-cc09-4c8d-9e74-938918b3fb3d" (UID: "934e0cb8-cc09-4c8d-9e74-938918b3fb3d"). InnerVolumeSpecName "kube-api-access-zmzsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.330773 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8351f6d6-d2ef-43bd-91de-181c48f712ac" (UID: "8351f6d6-d2ef-43bd-91de-181c48f712ac"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.338886 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8351f6d6-d2ef-43bd-91de-181c48f712ac" (UID: "8351f6d6-d2ef-43bd-91de-181c48f712ac"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.343530 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-scripts" (OuterVolumeSpecName: "scripts") pod "8351f6d6-d2ef-43bd-91de-181c48f712ac" (UID: "8351f6d6-d2ef-43bd-91de-181c48f712ac"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.344025 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8351f6d6-d2ef-43bd-91de-181c48f712ac-kube-api-access-xtqzl" (OuterVolumeSpecName: "kube-api-access-xtqzl") pod "8351f6d6-d2ef-43bd-91de-181c48f712ac" (UID: "8351f6d6-d2ef-43bd-91de-181c48f712ac"). InnerVolumeSpecName "kube-api-access-xtqzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.431666 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtqzl\" (UniqueName: \"kubernetes.io/projected/8351f6d6-d2ef-43bd-91de-181c48f712ac-kube-api-access-xtqzl\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.431695 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmzsc\" (UniqueName: \"kubernetes.io/projected/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-kube-api-access-zmzsc\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.431705 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.431714 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.431723 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8351f6d6-d2ef-43bd-91de-181c48f712ac-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.469907 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-rjc6l"] Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.486527 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8351f6d6-d2ef-43bd-91de-181c48f712ac" (UID: "8351f6d6-d2ef-43bd-91de-181c48f712ac"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.497002 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8cxz7"] Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.546529 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.562120 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "934e0cb8-cc09-4c8d-9e74-938918b3fb3d" (UID: "934e0cb8-cc09-4c8d-9e74-938918b3fb3d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.606441 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-config" (OuterVolumeSpecName: "config") pod "934e0cb8-cc09-4c8d-9e74-938918b3fb3d" (UID: "934e0cb8-cc09-4c8d-9e74-938918b3fb3d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.614989 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "934e0cb8-cc09-4c8d-9e74-938918b3fb3d" (UID: "934e0cb8-cc09-4c8d-9e74-938918b3fb3d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.619040 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"afdd92a5-3c71-42f2-9f4e-8aca550d6bd0","Type":"ContainerDied","Data":"9aa4d561c6f6f8e056bc239a75e5f7dd3286b6abbf5f5fbbac4a30c376a48b82"} Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.619091 4922 scope.go:117] "RemoveContainer" containerID="f9b7b248b12398045d4d9c76d0495a3026675be48141a60602d16e0e54ed8b37" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.619221 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.640552 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "934e0cb8-cc09-4c8d-9e74-938918b3fb3d" (UID: "934e0cb8-cc09-4c8d-9e74-938918b3fb3d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.652308 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.652337 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.652347 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.652359 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.660469 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-pl8xb"] Sep 29 22:47:23 crc kubenswrapper[4922]: W0929 22:47:23.665385 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3f528ca_b48a_4e5d_b801_620778a59ec6.slice/crio-ab354f9d0ccfbad164bbba1e24ddd899e139b086414eef154a67f1e6e11b4a5e WatchSource:0}: Error finding container ab354f9d0ccfbad164bbba1e24ddd899e139b086414eef154a67f1e6e11b4a5e: Status 404 returned error can't find the container with id ab354f9d0ccfbad164bbba1e24ddd899e139b086414eef154a67f1e6e11b4a5e Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.670765 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8351f6d6-d2ef-43bd-91de-181c48f712ac","Type":"ContainerDied","Data":"af198c0c1ac22ce178a8f907048bb448a6d9af3779a2161d6e4ac6a062cfc7fe"} Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.670882 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.678154 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "934e0cb8-cc09-4c8d-9e74-938918b3fb3d" (UID: "934e0cb8-cc09-4c8d-9e74-938918b3fb3d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.688419 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" event={"ID":"934e0cb8-cc09-4c8d-9e74-938918b3fb3d","Type":"ContainerDied","Data":"4ce3a7870161e86fba13809190df57f504e28fd14a712f769188647bcb983c4e"} Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.688523 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.692553 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-config-data" (OuterVolumeSpecName: "config-data") pod "8351f6d6-d2ef-43bd-91de-181c48f712ac" (UID: "8351f6d6-d2ef-43bd-91de-181c48f712ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.693329 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8351f6d6-d2ef-43bd-91de-181c48f712ac" (UID: "8351f6d6-d2ef-43bd-91de-181c48f712ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.705988 4922 generic.go:334] "Generic (PLEG): container finished" podID="c2331dae-0582-4728-8c1a-304d087ccf91" containerID="8ba9ff1c3b417ba93c5589b96fa732dc7aeb23ff636b44c44c80eb7b3db4a482" exitCode=0 Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.706047 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54dbcc9b8d-c5whm" event={"ID":"c2331dae-0582-4728-8c1a-304d087ccf91","Type":"ContainerDied","Data":"8ba9ff1c3b417ba93c5589b96fa732dc7aeb23ff636b44c44c80eb7b3db4a482"} Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.708526 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"25c9b137-8a15-477d-b87a-b4480c856551","Type":"ContainerStarted","Data":"e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0"} Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.711619 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rjc6l" event={"ID":"df501d92-801d-4caa-8d1e-da48b45182cf","Type":"ContainerStarted","Data":"e5f8e897f06c95678487550a4d47f59d5917b48ddfb5126bc11201f8611dcd6c"} Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.726817 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.511999597 podStartE2EDuration="15.726797623s" podCreationTimestamp="2025-09-29 22:47:08 +0000 UTC" firstStartedPulling="2025-09-29 22:47:09.532437029 +0000 UTC m=+1233.842725842" lastFinishedPulling="2025-09-29 22:47:22.747235045 +0000 UTC m=+1247.057523868" observedRunningTime="2025-09-29 22:47:23.722833525 +0000 UTC m=+1248.033122338" watchObservedRunningTime="2025-09-29 22:47:23.726797623 +0000 UTC m=+1248.037086436" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.771768 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.771799 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8351f6d6-d2ef-43bd-91de-181c48f712ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.771810 4922 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/934e0cb8-cc09-4c8d-9e74-938918b3fb3d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 
22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.780609 4922 scope.go:117] "RemoveContainer" containerID="02e9e68cd12d912f991f8675b0b90408a5423ec37e68999040cb666a8a3e2339" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.981878 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:47:23 crc kubenswrapper[4922]: I0929 22:47:23.990080 4922 scope.go:117] "RemoveContainer" containerID="5d13923b4927c8ebf0c78ca49bd3b5ff01153c8d1f4308adc56c02c33104c772" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.031341 4922 scope.go:117] "RemoveContainer" containerID="be46172238842a43b868dab4364dec704602549bd52052875036c8c3d634f2fe" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.036481 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.045410 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.055893 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-f5h89"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.072812 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-f5h89"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.074422 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.075346 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-combined-ca-bundle\") pod \"c2331dae-0582-4728-8c1a-304d087ccf91\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.075433 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-ovndb-tls-certs\") pod \"c2331dae-0582-4728-8c1a-304d087ccf91\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.075488 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-config\") pod \"c2331dae-0582-4728-8c1a-304d087ccf91\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.075581 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mvpp\" (UniqueName: \"kubernetes.io/projected/c2331dae-0582-4728-8c1a-304d087ccf91-kube-api-access-7mvpp\") pod \"c2331dae-0582-4728-8c1a-304d087ccf91\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.075654 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-httpd-config\") pod \"c2331dae-0582-4728-8c1a-304d087ccf91\" (UID: \"c2331dae-0582-4728-8c1a-304d087ccf91\") " Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076159 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="cinder-scheduler" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076183 4922 
state_mem.go:107] "Deleted CPUSet assignment" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="cinder-scheduler" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076205 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="proxy-httpd" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076213 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="proxy-httpd" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076222 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-api" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076260 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-api" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076271 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="sg-core" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076277 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="sg-core" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076287 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="probe" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076293 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="probe" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076301 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-notification-agent" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076307 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-notification-agent" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076323 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-httpd" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076329 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-httpd" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076337 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-central-agent" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076343 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-central-agent" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076354 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="init" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076360 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="init" Sep 29 22:47:24 crc kubenswrapper[4922]: E0929 22:47:24.076378 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="dnsmasq-dns" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076396 4922 
state_mem.go:107] "Deleted CPUSet assignment" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="dnsmasq-dns" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076562 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="dnsmasq-dns" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076575 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="probe" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076587 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-central-agent" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076598 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="ceilometer-notification-agent" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076606 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" containerName="cinder-scheduler" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076620 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="proxy-httpd" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076629 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" containerName="sg-core" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076635 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-httpd" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.076648 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" containerName="neutron-api" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.078164 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.082334 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.088732 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c2331dae-0582-4728-8c1a-304d087ccf91" (UID: "c2331dae-0582-4728-8c1a-304d087ccf91"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.091042 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.091311 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2331dae-0582-4728-8c1a-304d087ccf91-kube-api-access-7mvpp" (OuterVolumeSpecName: "kube-api-access-7mvpp") pod "c2331dae-0582-4728-8c1a-304d087ccf91" (UID: "c2331dae-0582-4728-8c1a-304d087ccf91"). InnerVolumeSpecName "kube-api-access-7mvpp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.096545 4922 scope.go:117] "RemoveContainer" containerID="36819f432e851c3dfc1f8df455b40743869faa73839066167e6d65a786128da1" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.098518 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.107198 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.115457 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.117615 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.121284 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.121482 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.121599 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.152529 4922 scope.go:117] "RemoveContainer" containerID="764ff50f189b795358b51e2b6158737a340613e08dca8ab773b7a1a1e6b8364f" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.171069 4922 scope.go:117] "RemoveContainer" containerID="a98166b4f4d9f2098a6ade5bbf2f9d1adec475ca2386cdd6e37391dc6c2c2b26" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.178670 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp78t\" (UniqueName: \"kubernetes.io/projected/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-kube-api-access-vp78t\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.178760 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.178787 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.178805 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.178861 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-combined-ca-bundle\") pod 
\"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.178929 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.178993 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mvpp\" (UniqueName: \"kubernetes.io/projected/c2331dae-0582-4728-8c1a-304d087ccf91-kube-api-access-7mvpp\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.179011 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.198644 4922 scope.go:117] "RemoveContainer" containerID="f5d3801b7e8cbea28efd023c00a3eb05736c1c84ab301e46dded8aeb46c30e92" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.254599 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2331dae-0582-4728-8c1a-304d087ccf91" (UID: "c2331dae-0582-4728-8c1a-304d087ccf91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.266475 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-config" (OuterVolumeSpecName: "config") pod "c2331dae-0582-4728-8c1a-304d087ccf91" (UID: "c2331dae-0582-4728-8c1a-304d087ccf91"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.275252 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c2331dae-0582-4728-8c1a-304d087ccf91" (UID: "c2331dae-0582-4728-8c1a-304d087ccf91"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.281529 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.281639 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.281668 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.281788 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.281843 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-scripts\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.281881 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xtxl\" (UniqueName: \"kubernetes.io/projected/4987c64c-7e63-4068-bf85-697d7c155bfc-kube-api-access-6xtxl\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.282074 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.282108 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-config-data\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.282175 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.282360 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.282523 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.282593 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-run-httpd\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.283025 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-log-httpd\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.283059 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp78t\" (UniqueName: \"kubernetes.io/projected/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-kube-api-access-vp78t\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.283171 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.283186 4922 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.283196 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c2331dae-0582-4728-8c1a-304d087ccf91-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.284821 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.285216 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.285884 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-scripts\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 
22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.286196 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.299658 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp78t\" (UniqueName: \"kubernetes.io/projected/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-kube-api-access-vp78t\") pod \"cinder-scheduler-0\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385197 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-run-httpd\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385268 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-log-httpd\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385330 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-scripts\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385350 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xtxl\" (UniqueName: \"kubernetes.io/projected/4987c64c-7e63-4068-bf85-697d7c155bfc-kube-api-access-6xtxl\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385372 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385406 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-config-data\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385461 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.385751 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-run-httpd\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc 
kubenswrapper[4922]: I0929 22:47:24.386084 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-log-httpd\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.389766 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.390196 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-config-data\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.390641 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.403664 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-scripts\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.406197 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xtxl\" (UniqueName: \"kubernetes.io/projected/4987c64c-7e63-4068-bf85-697d7c155bfc-kube-api-access-6xtxl\") pod \"ceilometer-0\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.444632 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8351f6d6-d2ef-43bd-91de-181c48f712ac" path="/var/lib/kubelet/pods/8351f6d6-d2ef-43bd-91de-181c48f712ac/volumes" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.446742 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" path="/var/lib/kubelet/pods/934e0cb8-cc09-4c8d-9e74-938918b3fb3d/volumes" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.447997 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afdd92a5-3c71-42f2-9f4e-8aca550d6bd0" path="/var/lib/kubelet/pods/afdd92a5-3c71-42f2-9f4e-8aca550d6bd0/volumes" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.448663 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.451314 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.735956 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54dbcc9b8d-c5whm" event={"ID":"c2331dae-0582-4728-8c1a-304d087ccf91","Type":"ContainerDied","Data":"e48f6945cd05bf4bc4d065539efed61ac3bf62b1b97169255de847a552fdd5c6"} Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.736270 4922 scope.go:117] "RemoveContainer" containerID="96e9425ddd60a2bf1a86df0f8f8a3afe27afdd3a1fc0d9e04d0422fb498c8e47" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.736223 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54dbcc9b8d-c5whm" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.755057 4922 generic.go:334] "Generic (PLEG): container finished" podID="d3f528ca-b48a-4e5d-b801-620778a59ec6" containerID="ee663f58dc68ad4963e1dfe7771f40417aed4b76cbeb0f64725d0d1c76da2b25" exitCode=0 Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.755145 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pl8xb" event={"ID":"d3f528ca-b48a-4e5d-b801-620778a59ec6","Type":"ContainerDied","Data":"ee663f58dc68ad4963e1dfe7771f40417aed4b76cbeb0f64725d0d1c76da2b25"} Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.755173 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pl8xb" event={"ID":"d3f528ca-b48a-4e5d-b801-620778a59ec6","Type":"ContainerStarted","Data":"ab354f9d0ccfbad164bbba1e24ddd899e139b086414eef154a67f1e6e11b4a5e"} Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.768113 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-54dbcc9b8d-c5whm"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.770971 4922 generic.go:334] "Generic (PLEG): container finished" podID="df501d92-801d-4caa-8d1e-da48b45182cf" containerID="b81b755bcfce346f5f3e40095ef94c2f2a4850fd6853f4d78e6341fd3e03fa1a" exitCode=0 Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.771050 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rjc6l" event={"ID":"df501d92-801d-4caa-8d1e-da48b45182cf","Type":"ContainerDied","Data":"b81b755bcfce346f5f3e40095ef94c2f2a4850fd6853f4d78e6341fd3e03fa1a"} Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.779682 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-54dbcc9b8d-c5whm"] Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.793612 4922 scope.go:117] "RemoveContainer" containerID="8ba9ff1c3b417ba93c5589b96fa732dc7aeb23ff636b44c44c80eb7b3db4a482" Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.794306 4922 generic.go:334] "Generic (PLEG): container finished" podID="755c37dc-48a9-4941-8410-1832fb4a78e8" containerID="b0601e07e1cda9d47e12b57216a40dab9a4a196e2dbdbe101c52408ab8dbc673" exitCode=0 Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.794982 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8cxz7" event={"ID":"755c37dc-48a9-4941-8410-1832fb4a78e8","Type":"ContainerDied","Data":"b0601e07e1cda9d47e12b57216a40dab9a4a196e2dbdbe101c52408ab8dbc673"} Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.795040 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8cxz7" event={"ID":"755c37dc-48a9-4941-8410-1832fb4a78e8","Type":"ContainerStarted","Data":"4a7e365f1aa2c4fbe5e617ba7e59a69ff91a6d96c695b99df84a713af4792761"} 
Sep 29 22:47:24 crc kubenswrapper[4922]: W0929 22:47:24.977971 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4987c64c_7e63_4068_bf85_697d7c155bfc.slice/crio-a28f359e8af015d8e3ac11e21662961b996d08b44c1db8a6a0b042ce3924959f WatchSource:0}: Error finding container a28f359e8af015d8e3ac11e21662961b996d08b44c1db8a6a0b042ce3924959f: Status 404 returned error can't find the container with id a28f359e8af015d8e3ac11e21662961b996d08b44c1db8a6a0b042ce3924959f Sep 29 22:47:24 crc kubenswrapper[4922]: I0929 22:47:24.990582 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:25 crc kubenswrapper[4922]: I0929 22:47:25.048133 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:47:25 crc kubenswrapper[4922]: W0929 22:47:25.049350 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb88af2db_0d8c_4d78_b1d9_5fb3c28c5e7b.slice/crio-e51efd7d0a777295c574ae8ad22bfded8af0d1c2e9f8e4ff1fde65b287829a3d WatchSource:0}: Error finding container e51efd7d0a777295c574ae8ad22bfded8af0d1c2e9f8e4ff1fde65b287829a3d: Status 404 returned error can't find the container with id e51efd7d0a777295c574ae8ad22bfded8af0d1c2e9f8e4ff1fde65b287829a3d Sep 29 22:47:25 crc kubenswrapper[4922]: I0929 22:47:25.153948 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 22:47:25 crc kubenswrapper[4922]: I0929 22:47:25.805495 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerStarted","Data":"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17"} Sep 29 22:47:25 crc kubenswrapper[4922]: I0929 22:47:25.805808 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerStarted","Data":"a28f359e8af015d8e3ac11e21662961b996d08b44c1db8a6a0b042ce3924959f"} Sep 29 22:47:25 crc kubenswrapper[4922]: I0929 22:47:25.806959 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b","Type":"ContainerStarted","Data":"70b0fa4952e40bc0e0d7fd5d77a22557f9abd49c9cf9a6a2477a7399f2433c1f"} Sep 29 22:47:25 crc kubenswrapper[4922]: I0929 22:47:25.806996 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b","Type":"ContainerStarted","Data":"e51efd7d0a777295c574ae8ad22bfded8af0d1c2e9f8e4ff1fde65b287829a3d"} Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.223801 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rjc6l" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.253482 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8cxz7" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.309341 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-pl8xb" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.333998 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck8pz\" (UniqueName: \"kubernetes.io/projected/df501d92-801d-4caa-8d1e-da48b45182cf-kube-api-access-ck8pz\") pod \"df501d92-801d-4caa-8d1e-da48b45182cf\" (UID: \"df501d92-801d-4caa-8d1e-da48b45182cf\") " Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.347521 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df501d92-801d-4caa-8d1e-da48b45182cf-kube-api-access-ck8pz" (OuterVolumeSpecName: "kube-api-access-ck8pz") pod "df501d92-801d-4caa-8d1e-da48b45182cf" (UID: "df501d92-801d-4caa-8d1e-da48b45182cf"). InnerVolumeSpecName "kube-api-access-ck8pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.435863 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2331dae-0582-4728-8c1a-304d087ccf91" path="/var/lib/kubelet/pods/c2331dae-0582-4728-8c1a-304d087ccf91/volumes" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.437513 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jh5fz\" (UniqueName: \"kubernetes.io/projected/d3f528ca-b48a-4e5d-b801-620778a59ec6-kube-api-access-jh5fz\") pod \"d3f528ca-b48a-4e5d-b801-620778a59ec6\" (UID: \"d3f528ca-b48a-4e5d-b801-620778a59ec6\") " Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.437645 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8zjp\" (UniqueName: \"kubernetes.io/projected/755c37dc-48a9-4941-8410-1832fb4a78e8-kube-api-access-n8zjp\") pod \"755c37dc-48a9-4941-8410-1832fb4a78e8\" (UID: \"755c37dc-48a9-4941-8410-1832fb4a78e8\") " Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.440713 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck8pz\" (UniqueName: \"kubernetes.io/projected/df501d92-801d-4caa-8d1e-da48b45182cf-kube-api-access-ck8pz\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.449027 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3f528ca-b48a-4e5d-b801-620778a59ec6-kube-api-access-jh5fz" (OuterVolumeSpecName: "kube-api-access-jh5fz") pod "d3f528ca-b48a-4e5d-b801-620778a59ec6" (UID: "d3f528ca-b48a-4e5d-b801-620778a59ec6"). InnerVolumeSpecName "kube-api-access-jh5fz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.449081 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/755c37dc-48a9-4941-8410-1832fb4a78e8-kube-api-access-n8zjp" (OuterVolumeSpecName: "kube-api-access-n8zjp") pod "755c37dc-48a9-4941-8410-1832fb4a78e8" (UID: "755c37dc-48a9-4941-8410-1832fb4a78e8"). InnerVolumeSpecName "kube-api-access-n8zjp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.542145 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jh5fz\" (UniqueName: \"kubernetes.io/projected/d3f528ca-b48a-4e5d-b801-620778a59ec6-kube-api-access-jh5fz\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.542410 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8zjp\" (UniqueName: \"kubernetes.io/projected/755c37dc-48a9-4941-8410-1832fb4a78e8-kube-api-access-n8zjp\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.838121 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-rjc6l" event={"ID":"df501d92-801d-4caa-8d1e-da48b45182cf","Type":"ContainerDied","Data":"e5f8e897f06c95678487550a4d47f59d5917b48ddfb5126bc11201f8611dcd6c"} Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.838158 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-rjc6l" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.838184 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5f8e897f06c95678487550a4d47f59d5917b48ddfb5126bc11201f8611dcd6c" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.844669 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8cxz7" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.844671 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8cxz7" event={"ID":"755c37dc-48a9-4941-8410-1832fb4a78e8","Type":"ContainerDied","Data":"4a7e365f1aa2c4fbe5e617ba7e59a69ff91a6d96c695b99df84a713af4792761"} Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.844817 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a7e365f1aa2c4fbe5e617ba7e59a69ff91a6d96c695b99df84a713af4792761" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.847814 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerStarted","Data":"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e"} Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.862023 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-pl8xb" event={"ID":"d3f528ca-b48a-4e5d-b801-620778a59ec6","Type":"ContainerDied","Data":"ab354f9d0ccfbad164bbba1e24ddd899e139b086414eef154a67f1e6e11b4a5e"} Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.862065 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab354f9d0ccfbad164bbba1e24ddd899e139b086414eef154a67f1e6e11b4a5e" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.862135 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-pl8xb" Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.873713 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b","Type":"ContainerStarted","Data":"db4826cac698823aad07ded4c68c796267271768ac310ffcee02df8874d50b96"} Sep 29 22:47:26 crc kubenswrapper[4922]: I0929 22:47:26.927049 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.927022657 podStartE2EDuration="2.927022657s" podCreationTimestamp="2025-09-29 22:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:47:26.89205023 +0000 UTC m=+1251.202339043" watchObservedRunningTime="2025-09-29 22:47:26.927022657 +0000 UTC m=+1251.237311470" Sep 29 22:47:27 crc kubenswrapper[4922]: I0929 22:47:27.884854 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerStarted","Data":"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404"} Sep 29 22:47:28 crc kubenswrapper[4922]: I0929 22:47:28.209651 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-688c87cc99-f5h89" podUID="934e0cb8-cc09-4c8d-9e74-938918b3fb3d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.155:5353: i/o timeout" Sep 29 22:47:28 crc kubenswrapper[4922]: I0929 22:47:28.913160 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:47:28 crc kubenswrapper[4922]: I0929 22:47:28.913223 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:47:28 crc kubenswrapper[4922]: I0929 22:47:28.913272 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:47:28 crc kubenswrapper[4922]: I0929 22:47:28.914069 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2744d35a0efae3434bd56ec391b0648d3824ba8565228dfe9d3610ca7ee648f3"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:47:28 crc kubenswrapper[4922]: I0929 22:47:28.914143 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://2744d35a0efae3434bd56ec391b0648d3824ba8565228dfe9d3610ca7ee648f3" gracePeriod=600 Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.452866 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 
22:47:29.579346 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.579602 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-log" containerID="cri-o://1c96e92ffa0c3272cb8760ec8cc7d7c542955b5586ada5d9e51c0bc13a600399" gracePeriod=30 Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.579690 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-httpd" containerID="cri-o://06a3b9d4c3f527f22faa0d11a5b2108756dd731da4fd6b78f31da90cb6e66c13" gracePeriod=30 Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.901435 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="2744d35a0efae3434bd56ec391b0648d3824ba8565228dfe9d3610ca7ee648f3" exitCode=0 Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.901512 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"2744d35a0efae3434bd56ec391b0648d3824ba8565228dfe9d3610ca7ee648f3"} Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.901740 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"832e8949f1bfe9c9884bbbe72e8107f9a55a105f03d2155b45dfe20f0e514d26"} Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.901762 4922 scope.go:117] "RemoveContainer" containerID="92b5767336a72e147921a9d2961a6367ee20762375b4581c376088ef25b4feea" Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.904196 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerStarted","Data":"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5"} Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.904268 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.906212 4922 generic.go:334] "Generic (PLEG): container finished" podID="6316309d-31b2-4062-a285-322d33221ee6" containerID="1c96e92ffa0c3272cb8760ec8cc7d7c542955b5586ada5d9e51c0bc13a600399" exitCode=143 Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.906248 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6316309d-31b2-4062-a285-322d33221ee6","Type":"ContainerDied","Data":"1c96e92ffa0c3272cb8760ec8cc7d7c542955b5586ada5d9e51c0bc13a600399"} Sep 29 22:47:29 crc kubenswrapper[4922]: I0929 22:47:29.950806 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.671942681 podStartE2EDuration="5.950788898s" podCreationTimestamp="2025-09-29 22:47:24 +0000 UTC" firstStartedPulling="2025-09-29 22:47:24.998764357 +0000 UTC m=+1249.309053170" lastFinishedPulling="2025-09-29 22:47:29.277610584 +0000 UTC m=+1253.587899387" observedRunningTime="2025-09-29 22:47:29.941271712 +0000 UTC m=+1254.251560525" watchObservedRunningTime="2025-09-29 
22:47:29.950788898 +0000 UTC m=+1254.261077711" Sep 29 22:47:30 crc kubenswrapper[4922]: I0929 22:47:30.382818 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:47:30 crc kubenswrapper[4922]: I0929 22:47:30.383286 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-log" containerID="cri-o://c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525" gracePeriod=30 Sep 29 22:47:30 crc kubenswrapper[4922]: I0929 22:47:30.383477 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-httpd" containerID="cri-o://d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70" gracePeriod=30 Sep 29 22:47:30 crc kubenswrapper[4922]: I0929 22:47:30.917141 4922 generic.go:334] "Generic (PLEG): container finished" podID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerID="c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525" exitCode=143 Sep 29 22:47:30 crc kubenswrapper[4922]: I0929 22:47:30.917423 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"898e76fa-e8bb-4354-802f-5a6f6c14c0b0","Type":"ContainerDied","Data":"c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525"} Sep 29 22:47:31 crc kubenswrapper[4922]: I0929 22:47:31.681164 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:31 crc kubenswrapper[4922]: I0929 22:47:31.924467 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="ceilometer-central-agent" containerID="cri-o://840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" gracePeriod=30 Sep 29 22:47:31 crc kubenswrapper[4922]: I0929 22:47:31.924515 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="sg-core" containerID="cri-o://52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" gracePeriod=30 Sep 29 22:47:31 crc kubenswrapper[4922]: I0929 22:47:31.924551 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="proxy-httpd" containerID="cri-o://33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" gracePeriod=30 Sep 29 22:47:31 crc kubenswrapper[4922]: I0929 22:47:31.924551 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="ceilometer-notification-agent" containerID="cri-o://3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" gracePeriod=30 Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.680608 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.880634 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-combined-ca-bundle\") pod \"4987c64c-7e63-4068-bf85-697d7c155bfc\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.880786 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-config-data\") pod \"4987c64c-7e63-4068-bf85-697d7c155bfc\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.880893 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-run-httpd\") pod \"4987c64c-7e63-4068-bf85-697d7c155bfc\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.880913 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-log-httpd\") pod \"4987c64c-7e63-4068-bf85-697d7c155bfc\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.880959 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xtxl\" (UniqueName: \"kubernetes.io/projected/4987c64c-7e63-4068-bf85-697d7c155bfc-kube-api-access-6xtxl\") pod \"4987c64c-7e63-4068-bf85-697d7c155bfc\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.880974 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-scripts\") pod \"4987c64c-7e63-4068-bf85-697d7c155bfc\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.881000 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-sg-core-conf-yaml\") pod \"4987c64c-7e63-4068-bf85-697d7c155bfc\" (UID: \"4987c64c-7e63-4068-bf85-697d7c155bfc\") " Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.881357 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4987c64c-7e63-4068-bf85-697d7c155bfc" (UID: "4987c64c-7e63-4068-bf85-697d7c155bfc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.882342 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4987c64c-7e63-4068-bf85-697d7c155bfc" (UID: "4987c64c-7e63-4068-bf85-697d7c155bfc"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.896584 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-scripts" (OuterVolumeSpecName: "scripts") pod "4987c64c-7e63-4068-bf85-697d7c155bfc" (UID: "4987c64c-7e63-4068-bf85-697d7c155bfc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.900175 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4987c64c-7e63-4068-bf85-697d7c155bfc-kube-api-access-6xtxl" (OuterVolumeSpecName: "kube-api-access-6xtxl") pod "4987c64c-7e63-4068-bf85-697d7c155bfc" (UID: "4987c64c-7e63-4068-bf85-697d7c155bfc"). InnerVolumeSpecName "kube-api-access-6xtxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.908372 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4987c64c-7e63-4068-bf85-697d7c155bfc" (UID: "4987c64c-7e63-4068-bf85-697d7c155bfc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.953798 4922 generic.go:334] "Generic (PLEG): container finished" podID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerID="33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" exitCode=0 Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954111 4922 generic.go:334] "Generic (PLEG): container finished" podID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerID="52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" exitCode=2 Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954123 4922 generic.go:334] "Generic (PLEG): container finished" podID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerID="3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" exitCode=0 Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954130 4922 generic.go:334] "Generic (PLEG): container finished" podID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerID="840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" exitCode=0 Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.953949 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerDied","Data":"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5"} Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954204 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerDied","Data":"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404"} Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954260 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerDied","Data":"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e"} Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954273 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerDied","Data":"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17"} Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954281 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4987c64c-7e63-4068-bf85-697d7c155bfc","Type":"ContainerDied","Data":"a28f359e8af015d8e3ac11e21662961b996d08b44c1db8a6a0b042ce3924959f"} Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954305 4922 scope.go:117] "RemoveContainer" containerID="33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.954704 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.958802 4922 generic.go:334] "Generic (PLEG): container finished" podID="6316309d-31b2-4062-a285-322d33221ee6" containerID="06a3b9d4c3f527f22faa0d11a5b2108756dd731da4fd6b78f31da90cb6e66c13" exitCode=0 Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.958834 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6316309d-31b2-4062-a285-322d33221ee6","Type":"ContainerDied","Data":"06a3b9d4c3f527f22faa0d11a5b2108756dd731da4fd6b78f31da90cb6e66c13"} Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.984661 4922 scope.go:117] "RemoveContainer" containerID="52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.987462 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.987486 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4987c64c-7e63-4068-bf85-697d7c155bfc-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.987499 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xtxl\" (UniqueName: \"kubernetes.io/projected/4987c64c-7e63-4068-bf85-697d7c155bfc-kube-api-access-6xtxl\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.987511 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:32 crc kubenswrapper[4922]: I0929 22:47:32.987524 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.003779 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4987c64c-7e63-4068-bf85-697d7c155bfc" (UID: "4987c64c-7e63-4068-bf85-697d7c155bfc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.004853 4922 scope.go:117] "RemoveContainer" containerID="3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.023955 4922 scope.go:117] "RemoveContainer" containerID="840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.055932 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-config-data" (OuterVolumeSpecName: "config-data") pod "4987c64c-7e63-4068-bf85-697d7c155bfc" (UID: "4987c64c-7e63-4068-bf85-697d7c155bfc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.089202 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.089226 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4987c64c-7e63-4068-bf85-697d7c155bfc-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.146167 4922 scope.go:117] "RemoveContainer" containerID="33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.146492 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": container with ID starting with 33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5 not found: ID does not exist" containerID="33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.146521 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5"} err="failed to get container status \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": rpc error: code = NotFound desc = could not find container \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": container with ID starting with 33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.146543 4922 scope.go:117] "RemoveContainer" containerID="52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.147597 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": container with ID starting with 52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404 not found: ID does not exist" containerID="52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.147640 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404"} err="failed to get container status 
\"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": rpc error: code = NotFound desc = could not find container \"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": container with ID starting with 52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.147672 4922 scope.go:117] "RemoveContainer" containerID="3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.147963 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": container with ID starting with 3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e not found: ID does not exist" containerID="3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.147996 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e"} err="failed to get container status \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": rpc error: code = NotFound desc = could not find container \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": container with ID starting with 3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.148009 4922 scope.go:117] "RemoveContainer" containerID="840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.148485 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": container with ID starting with 840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17 not found: ID does not exist" containerID="840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.148523 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17"} err="failed to get container status \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": rpc error: code = NotFound desc = could not find container \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": container with ID starting with 840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.148538 4922 scope.go:117] "RemoveContainer" containerID="33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.148736 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5"} err="failed to get container status \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": rpc error: code = NotFound desc = could not find container \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": container with ID starting with 33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5 not found: 
ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.148754 4922 scope.go:117] "RemoveContainer" containerID="52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.149684 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404"} err="failed to get container status \"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": rpc error: code = NotFound desc = could not find container \"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": container with ID starting with 52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.149701 4922 scope.go:117] "RemoveContainer" containerID="3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.149973 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e"} err="failed to get container status \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": rpc error: code = NotFound desc = could not find container \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": container with ID starting with 3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.150008 4922 scope.go:117] "RemoveContainer" containerID="840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.150423 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17"} err="failed to get container status \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": rpc error: code = NotFound desc = could not find container \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": container with ID starting with 840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.150440 4922 scope.go:117] "RemoveContainer" containerID="33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.150820 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5"} err="failed to get container status \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": rpc error: code = NotFound desc = could not find container \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": container with ID starting with 33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.150837 4922 scope.go:117] "RemoveContainer" containerID="52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.151065 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404"} err="failed to get container status 
\"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": rpc error: code = NotFound desc = could not find container \"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": container with ID starting with 52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.151083 4922 scope.go:117] "RemoveContainer" containerID="3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.151541 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e"} err="failed to get container status \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": rpc error: code = NotFound desc = could not find container \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": container with ID starting with 3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.151559 4922 scope.go:117] "RemoveContainer" containerID="840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.152165 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17"} err="failed to get container status \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": rpc error: code = NotFound desc = could not find container \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": container with ID starting with 840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.152189 4922 scope.go:117] "RemoveContainer" containerID="33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.152543 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5"} err="failed to get container status \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": rpc error: code = NotFound desc = could not find container \"33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5\": container with ID starting with 33c0cfd018e0e305b3e90ad0856873f0fe328e611e17a0bab99e8cb1d88d56f5 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.152562 4922 scope.go:117] "RemoveContainer" containerID="52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.152901 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404"} err="failed to get container status \"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": rpc error: code = NotFound desc = could not find container \"52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404\": container with ID starting with 52c4e3920bd820605294cf6f9e251dfa581bc39e4627650896473dd64ca02404 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.152931 4922 scope.go:117] "RemoveContainer" 
containerID="3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.153210 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e"} err="failed to get container status \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": rpc error: code = NotFound desc = could not find container \"3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e\": container with ID starting with 3b6ea0ab9cb3ad4dc17f65922c244a96e4dbde885f4e77f170a87844def93b3e not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.153231 4922 scope.go:117] "RemoveContainer" containerID="840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.153529 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17"} err="failed to get container status \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": rpc error: code = NotFound desc = could not find container \"840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17\": container with ID starting with 840610b1843ba90e3f8b724ede16d3e0de7952ab6f8f2a7fa8c0ca52fd67bc17 not found: ID does not exist" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.248111 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.296786 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-public-tls-certs\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.296851 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-httpd-run\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.296941 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-config-data\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.296982 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-scripts\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.297004 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-combined-ca-bundle\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.297068 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gh8j\" 
(UniqueName: \"kubernetes.io/projected/6316309d-31b2-4062-a285-322d33221ee6-kube-api-access-2gh8j\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.297098 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-logs\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.297130 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"6316309d-31b2-4062-a285-322d33221ee6\" (UID: \"6316309d-31b2-4062-a285-322d33221ee6\") " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.310570 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.311173 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-logs" (OuterVolumeSpecName: "logs") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.311502 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.314273 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.314361 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-scripts" (OuterVolumeSpecName: "scripts") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.324613 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6316309d-31b2-4062-a285-322d33221ee6-kube-api-access-2gh8j" (OuterVolumeSpecName: "kube-api-access-2gh8j") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "kube-api-access-2gh8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.334570 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.347532 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348106 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="755c37dc-48a9-4941-8410-1832fb4a78e8" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.348172 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="755c37dc-48a9-4941-8410-1832fb4a78e8" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348225 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-httpd" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.348273 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-httpd" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348333 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="ceilometer-notification-agent" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.348446 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="ceilometer-notification-agent" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348531 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-log" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.348583 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-log" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348630 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df501d92-801d-4caa-8d1e-da48b45182cf" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.348676 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="df501d92-801d-4caa-8d1e-da48b45182cf" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348732 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="sg-core" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.348780 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="sg-core" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348844 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3f528ca-b48a-4e5d-b801-620778a59ec6" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.348901 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3f528ca-b48a-4e5d-b801-620778a59ec6" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.348961 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="ceilometer-central-agent" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349014 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" 
containerName="ceilometer-central-agent" Sep 29 22:47:33 crc kubenswrapper[4922]: E0929 22:47:33.349077 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="proxy-httpd" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349130 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="proxy-httpd" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349335 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="proxy-httpd" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349410 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-log" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349465 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6316309d-31b2-4062-a285-322d33221ee6" containerName="glance-httpd" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349521 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="755c37dc-48a9-4941-8410-1832fb4a78e8" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349576 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="ceilometer-notification-agent" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349641 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="ceilometer-central-agent" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349700 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3f528ca-b48a-4e5d-b801-620778a59ec6" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349752 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="df501d92-801d-4caa-8d1e-da48b45182cf" containerName="mariadb-database-create" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.349801 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" containerName="sg-core" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.351352 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.353332 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.356012 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.356962 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.393544 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398417 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-scripts\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398468 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-run-httpd\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398502 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398530 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398566 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-config-data\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398593 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfxhp\" (UniqueName: \"kubernetes.io/projected/05864898-e5b4-4306-ae1b-ca2ae6513717-kube-api-access-mfxhp\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398616 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-log-httpd\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398718 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398735 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398743 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398754 4922 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-2gh8j\" (UniqueName: \"kubernetes.io/projected/6316309d-31b2-4062-a285-322d33221ee6-kube-api-access-2gh8j\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398763 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6316309d-31b2-4062-a285-322d33221ee6-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.398784 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.399537 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.424382 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.431494 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-config-data" (OuterVolumeSpecName: "config-data") pod "6316309d-31b2-4062-a285-322d33221ee6" (UID: "6316309d-31b2-4062-a285-322d33221ee6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500416 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500484 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-config-data\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500517 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfxhp\" (UniqueName: \"kubernetes.io/projected/05864898-e5b4-4306-ae1b-ca2ae6513717-kube-api-access-mfxhp\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500546 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-log-httpd\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500612 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-scripts\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 
crc kubenswrapper[4922]: I0929 22:47:33.500649 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-run-httpd\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500676 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500725 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500735 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.500746 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6316309d-31b2-4062-a285-322d33221ee6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.501177 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-log-httpd\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.501385 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-run-httpd\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.503850 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.503942 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.504232 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-config-data\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.504673 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-scripts\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc 
kubenswrapper[4922]: I0929 22:47:33.517009 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfxhp\" (UniqueName: \"kubernetes.io/projected/05864898-e5b4-4306-ae1b-ca2ae6513717-kube-api-access-mfxhp\") pod \"ceilometer-0\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.675672 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.916537 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.968302 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"6316309d-31b2-4062-a285-322d33221ee6","Type":"ContainerDied","Data":"bf94a062e5a18ae5eee10d2c84095501e780d421c209ff71768e682119a5304a"} Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.968348 4922 scope.go:117] "RemoveContainer" containerID="06a3b9d4c3f527f22faa0d11a5b2108756dd731da4fd6b78f31da90cb6e66c13" Sep 29 22:47:33 crc kubenswrapper[4922]: I0929 22:47:33.968506 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.016574 4922 generic.go:334] "Generic (PLEG): container finished" podID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerID="d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70" exitCode=0 Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.016615 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"898e76fa-e8bb-4354-802f-5a6f6c14c0b0","Type":"ContainerDied","Data":"d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70"} Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.016643 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"898e76fa-e8bb-4354-802f-5a6f6c14c0b0","Type":"ContainerDied","Data":"ece89910a5e36e023fac8782901a68413fd0e751a7731d36c458d1cd9da22d07"} Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.016697 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.053650 4922 scope.go:117] "RemoveContainer" containerID="1c96e92ffa0c3272cb8760ec8cc7d7c542955b5586ada5d9e51c0bc13a600399" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.063455 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.087768 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.106104 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: E0929 22:47:34.106568 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-log" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.106592 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-log" Sep 29 22:47:34 crc kubenswrapper[4922]: E0929 22:47:34.106605 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-httpd" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.106612 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-httpd" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.106870 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-log" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.106888 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" containerName="glance-httpd" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.107963 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111416 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-logs\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111511 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111541 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-httpd-run\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111561 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-internal-tls-certs\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111590 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-combined-ca-bundle\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111605 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-scripts\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111662 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-config-data\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.111739 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b784l\" (UniqueName: \"kubernetes.io/projected/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-kube-api-access-b784l\") pod \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\" (UID: \"898e76fa-e8bb-4354-802f-5a6f6c14c0b0\") " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.113438 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-logs" (OuterVolumeSpecName: "logs") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.117728 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.117940 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.124740 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.145684 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.148479 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-scripts" (OuterVolumeSpecName: "scripts") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.193597 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-kube-api-access-b784l" (OuterVolumeSpecName: "kube-api-access-b784l") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "kube-api-access-b784l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.194611 4922 scope.go:117] "RemoveContainer" containerID="d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213346 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-config-data\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213683 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-scripts\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213716 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213746 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213782 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg82b\" (UniqueName: \"kubernetes.io/projected/5b8254ca-83c1-49a8-b453-107577b54f01-kube-api-access-dg82b\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213803 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213836 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-logs\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213895 4922 reconciler_common.go:286] 
"operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213907 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213917 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213925 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b784l\" (UniqueName: \"kubernetes.io/projected/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-kube-api-access-b784l\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.213938 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.216892 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.240376 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.240909 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-config-data" (OuterVolumeSpecName: "config-data") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.243173 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.244978 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.245124 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "898e76fa-e8bb-4354-802f-5a6f6c14c0b0" (UID: "898e76fa-e8bb-4354-802f-5a6f6c14c0b0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:34 crc kubenswrapper[4922]: W0929 22:47:34.248176 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05864898_e5b4_4306_ae1b_ca2ae6513717.slice/crio-60465f96647550eea2d8957602a2edb20f4dc9d947ca343d958202bff078f809 WatchSource:0}: Error finding container 60465f96647550eea2d8957602a2edb20f4dc9d947ca343d958202bff078f809: Status 404 returned error can't find the container with id 60465f96647550eea2d8957602a2edb20f4dc9d947ca343d958202bff078f809 Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.315750 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-logs\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.315821 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-config-data\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.315881 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-scripts\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.315913 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.315944 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316010 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316033 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg82b\" (UniqueName: \"kubernetes.io/projected/5b8254ca-83c1-49a8-b453-107577b54f01-kube-api-access-dg82b\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316083 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-public-tls-certs\") pod 
\"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316138 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316150 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316160 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316169 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/898e76fa-e8bb-4354-802f-5a6f6c14c0b0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.316726 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.317017 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.317164 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-logs\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.319956 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.323571 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-scripts\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.324015 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.326654 4922 scope.go:117] 
"RemoveContainer" containerID="c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.328529 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-config-data\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.334378 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg82b\" (UniqueName: \"kubernetes.io/projected/5b8254ca-83c1-49a8-b453-107577b54f01-kube-api-access-dg82b\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.349544 4922 scope.go:117] "RemoveContainer" containerID="d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70" Sep 29 22:47:34 crc kubenswrapper[4922]: E0929 22:47:34.358665 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70\": container with ID starting with d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70 not found: ID does not exist" containerID="d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.358719 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70"} err="failed to get container status \"d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70\": rpc error: code = NotFound desc = could not find container \"d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70\": container with ID starting with d891d9e3aa010cf67fab78a655878afb39f61579e82aa3ca09cee7d657d0ac70 not found: ID does not exist" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.358748 4922 scope.go:117] "RemoveContainer" containerID="c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.358863 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: E0929 22:47:34.359030 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525\": container with ID starting with c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525 not found: ID does not exist" containerID="c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.359058 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525"} err="failed to get container status \"c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525\": rpc error: code = NotFound desc = could not find container \"c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525\": container with ID starting with c849cac181c0ecce667ee15ac84d8d630a386f2654425e5d105172987a386525 not found: ID does not exist" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 
22:47:34.359361 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.370264 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.383882 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.385400 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.391587 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.391923 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.392028 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.433829 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4987c64c-7e63-4068-bf85-697d7c155bfc" path="/var/lib/kubelet/pods/4987c64c-7e63-4068-bf85-697d7c155bfc/volumes" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.435061 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6316309d-31b2-4062-a285-322d33221ee6" path="/var/lib/kubelet/pods/6316309d-31b2-4062-a285-322d33221ee6/volumes" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.436978 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="898e76fa-e8bb-4354-802f-5a6f6c14c0b0" path="/var/lib/kubelet/pods/898e76fa-e8bb-4354-802f-5a6f6c14c0b0/volumes" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519265 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519309 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519350 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-logs\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519374 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-combined-ca-bundle\") 
pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519428 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-config-data\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519548 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd7lp\" (UniqueName: \"kubernetes.io/projected/447099dc-1eea-4510-8b94-faa6899f6b06-kube-api-access-zd7lp\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519599 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.519641 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-scripts\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.610781 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621040 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621076 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621106 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-logs\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621125 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621147 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-config-data\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621204 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd7lp\" (UniqueName: \"kubernetes.io/projected/447099dc-1eea-4510-8b94-faa6899f6b06-kube-api-access-zd7lp\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621221 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621236 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-scripts\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621785 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Sep 
29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.621930 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-logs\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.622240 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.628319 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-scripts\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.629215 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.630261 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.631774 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-config-data\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.637326 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd7lp\" (UniqueName: \"kubernetes.io/projected/447099dc-1eea-4510-8b94-faa6899f6b06-kube-api-access-zd7lp\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.650240 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " pod="openstack/glance-default-internal-api-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.695720 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 22:47:34 crc kubenswrapper[4922]: I0929 22:47:34.712430 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:35 crc kubenswrapper[4922]: I0929 22:47:35.056669 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerStarted","Data":"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9"} Sep 29 22:47:35 crc kubenswrapper[4922]: I0929 22:47:35.056708 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerStarted","Data":"60465f96647550eea2d8957602a2edb20f4dc9d947ca343d958202bff078f809"} Sep 29 22:47:35 crc kubenswrapper[4922]: I0929 22:47:35.221918 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:47:35 crc kubenswrapper[4922]: I0929 22:47:35.325006 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:47:35 crc kubenswrapper[4922]: W0929 22:47:35.337064 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod447099dc_1eea_4510_8b94_faa6899f6b06.slice/crio-497fb562b8f648f338fccc14b87906721944a700859fef0001dd16c335a5cfef WatchSource:0}: Error finding container 497fb562b8f648f338fccc14b87906721944a700859fef0001dd16c335a5cfef: Status 404 returned error can't find the container with id 497fb562b8f648f338fccc14b87906721944a700859fef0001dd16c335a5cfef Sep 29 22:47:35 crc kubenswrapper[4922]: I0929 22:47:35.731897 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:36 crc kubenswrapper[4922]: I0929 22:47:36.065111 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"447099dc-1eea-4510-8b94-faa6899f6b06","Type":"ContainerStarted","Data":"497fb562b8f648f338fccc14b87906721944a700859fef0001dd16c335a5cfef"} Sep 29 22:47:36 crc kubenswrapper[4922]: I0929 22:47:36.066790 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5b8254ca-83c1-49a8-b453-107577b54f01","Type":"ContainerStarted","Data":"4bc42216ffedb37bd9c073af3cb65a977c43773e104a4af6d8bfe2f3f51e9e1a"} Sep 29 22:47:37 crc kubenswrapper[4922]: I0929 22:47:37.097549 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5b8254ca-83c1-49a8-b453-107577b54f01","Type":"ContainerStarted","Data":"f260bc8fa0de7751b280365c6bd0a0f523435c779c5725e4de6df27a0478f19c"} Sep 29 22:47:37 crc kubenswrapper[4922]: I0929 22:47:37.103477 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"447099dc-1eea-4510-8b94-faa6899f6b06","Type":"ContainerStarted","Data":"ccbfc95659bd8ac0f5153a3e15fe5796f00cb82dbede7bd19b2c1d755699bbe7"} Sep 29 22:47:38 crc kubenswrapper[4922]: I0929 22:47:38.118870 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerStarted","Data":"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838"} Sep 29 22:47:38 crc kubenswrapper[4922]: I0929 22:47:38.120973 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerStarted","Data":"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a"} Sep 
29 22:47:38 crc kubenswrapper[4922]: I0929 22:47:38.121461 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5b8254ca-83c1-49a8-b453-107577b54f01","Type":"ContainerStarted","Data":"e9c29c8849b943f694f98cfb6baeef978eb44c7f2f718c45a1d5e25db0e098e6"} Sep 29 22:47:38 crc kubenswrapper[4922]: I0929 22:47:38.124016 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"447099dc-1eea-4510-8b94-faa6899f6b06","Type":"ContainerStarted","Data":"4bade6528d890c812a2f6c25a51fb063416552fbc042c3c5effa4226f2415177"} Sep 29 22:47:38 crc kubenswrapper[4922]: I0929 22:47:38.178919 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.178884844 podStartE2EDuration="4.178884844s" podCreationTimestamp="2025-09-29 22:47:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:47:38.155665609 +0000 UTC m=+1262.465954492" watchObservedRunningTime="2025-09-29 22:47:38.178884844 +0000 UTC m=+1262.489173687" Sep 29 22:47:38 crc kubenswrapper[4922]: I0929 22:47:38.211443 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.211422961 podStartE2EDuration="4.211422961s" podCreationTimestamp="2025-09-29 22:47:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:47:38.197018653 +0000 UTC m=+1262.507307506" watchObservedRunningTime="2025-09-29 22:47:38.211422961 +0000 UTC m=+1262.521711784" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.146268 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerStarted","Data":"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791"} Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.146837 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.146818 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-central-agent" containerID="cri-o://5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" gracePeriod=30 Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.147039 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="proxy-httpd" containerID="cri-o://e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" gracePeriod=30 Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.147109 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="sg-core" containerID="cri-o://e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" gracePeriod=30 Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.147181 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-notification-agent" 
containerID="cri-o://6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" gracePeriod=30 Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.181867 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.550549784 podStartE2EDuration="7.181842405s" podCreationTimestamp="2025-09-29 22:47:33 +0000 UTC" firstStartedPulling="2025-09-29 22:47:34.258723787 +0000 UTC m=+1258.569012600" lastFinishedPulling="2025-09-29 22:47:38.890016408 +0000 UTC m=+1263.200305221" observedRunningTime="2025-09-29 22:47:40.168618167 +0000 UTC m=+1264.478906990" watchObservedRunningTime="2025-09-29 22:47:40.181842405 +0000 UTC m=+1264.492131238" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.723163 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-9bbc-account-create-jjb4q"] Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.725506 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9bbc-account-create-jjb4q" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.728009 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.734514 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9bbc-account-create-jjb4q"] Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.848748 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg85j\" (UniqueName: \"kubernetes.io/projected/fff35789-9cfe-47f4-8477-ddaf5caf85fc-kube-api-access-rg85j\") pod \"nova-api-9bbc-account-create-jjb4q\" (UID: \"fff35789-9cfe-47f4-8477-ddaf5caf85fc\") " pod="openstack/nova-api-9bbc-account-create-jjb4q" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.893724 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-4139-account-create-lssc6"] Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.894829 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4139-account-create-lssc6" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.896492 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.903436 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4139-account-create-lssc6"] Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.939860 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:40 crc kubenswrapper[4922]: I0929 22:47:40.949756 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg85j\" (UniqueName: \"kubernetes.io/projected/fff35789-9cfe-47f4-8477-ddaf5caf85fc-kube-api-access-rg85j\") pod \"nova-api-9bbc-account-create-jjb4q\" (UID: \"fff35789-9cfe-47f4-8477-ddaf5caf85fc\") " pod="openstack/nova-api-9bbc-account-create-jjb4q" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.011632 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg85j\" (UniqueName: \"kubernetes.io/projected/fff35789-9cfe-47f4-8477-ddaf5caf85fc-kube-api-access-rg85j\") pod \"nova-api-9bbc-account-create-jjb4q\" (UID: \"fff35789-9cfe-47f4-8477-ddaf5caf85fc\") " pod="openstack/nova-api-9bbc-account-create-jjb4q" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.051434 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-config-data\") pod \"05864898-e5b4-4306-ae1b-ca2ae6513717\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.051492 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-sg-core-conf-yaml\") pod \"05864898-e5b4-4306-ae1b-ca2ae6513717\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.051573 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfxhp\" (UniqueName: \"kubernetes.io/projected/05864898-e5b4-4306-ae1b-ca2ae6513717-kube-api-access-mfxhp\") pod \"05864898-e5b4-4306-ae1b-ca2ae6513717\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.051639 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-scripts\") pod \"05864898-e5b4-4306-ae1b-ca2ae6513717\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.051656 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-combined-ca-bundle\") pod \"05864898-e5b4-4306-ae1b-ca2ae6513717\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.051714 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-run-httpd\") pod \"05864898-e5b4-4306-ae1b-ca2ae6513717\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.051745 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-log-httpd\") pod \"05864898-e5b4-4306-ae1b-ca2ae6513717\" (UID: \"05864898-e5b4-4306-ae1b-ca2ae6513717\") " Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.052027 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzqgc\" (UniqueName: 
\"kubernetes.io/projected/f9ef1309-66cb-4726-b646-941bd87b27b1-kube-api-access-fzqgc\") pod \"nova-cell0-4139-account-create-lssc6\" (UID: \"f9ef1309-66cb-4726-b646-941bd87b27b1\") " pod="openstack/nova-cell0-4139-account-create-lssc6" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.052550 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "05864898-e5b4-4306-ae1b-ca2ae6513717" (UID: "05864898-e5b4-4306-ae1b-ca2ae6513717"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.053090 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "05864898-e5b4-4306-ae1b-ca2ae6513717" (UID: "05864898-e5b4-4306-ae1b-ca2ae6513717"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.053583 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9bbc-account-create-jjb4q" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.055227 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05864898-e5b4-4306-ae1b-ca2ae6513717-kube-api-access-mfxhp" (OuterVolumeSpecName: "kube-api-access-mfxhp") pod "05864898-e5b4-4306-ae1b-ca2ae6513717" (UID: "05864898-e5b4-4306-ae1b-ca2ae6513717"). InnerVolumeSpecName "kube-api-access-mfxhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.056907 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-scripts" (OuterVolumeSpecName: "scripts") pod "05864898-e5b4-4306-ae1b-ca2ae6513717" (UID: "05864898-e5b4-4306-ae1b-ca2ae6513717"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.093813 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "05864898-e5b4-4306-ae1b-ca2ae6513717" (UID: "05864898-e5b4-4306-ae1b-ca2ae6513717"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.102645 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-fcf1-account-create-cdp2t"] Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.103138 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-central-agent" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103155 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-central-agent" Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.103183 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-notification-agent" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103189 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-notification-agent" Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.103202 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="sg-core" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103207 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="sg-core" Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.103220 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="proxy-httpd" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103226 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="proxy-httpd" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103438 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-notification-agent" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103461 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="proxy-httpd" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103472 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="ceilometer-central-agent" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.103492 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerName="sg-core" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.118491 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-fcf1-account-create-cdp2t"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.118587 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.122448 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.134401 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05864898-e5b4-4306-ae1b-ca2ae6513717" (UID: "05864898-e5b4-4306-ae1b-ca2ae6513717"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.153342 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzqgc\" (UniqueName: \"kubernetes.io/projected/f9ef1309-66cb-4726-b646-941bd87b27b1-kube-api-access-fzqgc\") pod \"nova-cell0-4139-account-create-lssc6\" (UID: \"f9ef1309-66cb-4726-b646-941bd87b27b1\") " pod="openstack/nova-cell0-4139-account-create-lssc6" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.153668 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.153728 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfxhp\" (UniqueName: \"kubernetes.io/projected/05864898-e5b4-4306-ae1b-ca2ae6513717-kube-api-access-mfxhp\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.153776 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.153798 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.153808 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.153816 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/05864898-e5b4-4306-ae1b-ca2ae6513717-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162360 4922 generic.go:334] "Generic (PLEG): container finished" podID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerID="e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" exitCode=0 Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162396 4922 generic.go:334] "Generic (PLEG): container finished" podID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerID="e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" exitCode=2 Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162405 4922 generic.go:334] "Generic (PLEG): container finished" podID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerID="6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" exitCode=0 Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162425 4922 generic.go:334] "Generic (PLEG): container finished" podID="05864898-e5b4-4306-ae1b-ca2ae6513717" containerID="5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" exitCode=0 Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162447 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerDied","Data":"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791"} Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162475 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerDied","Data":"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838"} Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162487 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerDied","Data":"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a"} Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162496 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerDied","Data":"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9"} Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162505 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"05864898-e5b4-4306-ae1b-ca2ae6513717","Type":"ContainerDied","Data":"60465f96647550eea2d8957602a2edb20f4dc9d947ca343d958202bff078f809"} Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162826 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.162867 4922 scope.go:117] "RemoveContainer" containerID="e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.170933 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzqgc\" (UniqueName: \"kubernetes.io/projected/f9ef1309-66cb-4726-b646-941bd87b27b1-kube-api-access-fzqgc\") pod \"nova-cell0-4139-account-create-lssc6\" (UID: \"f9ef1309-66cb-4726-b646-941bd87b27b1\") " pod="openstack/nova-cell0-4139-account-create-lssc6" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.206210 4922 scope.go:117] "RemoveContainer" containerID="e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.225652 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-config-data" (OuterVolumeSpecName: "config-data") pod "05864898-e5b4-4306-ae1b-ca2ae6513717" (UID: "05864898-e5b4-4306-ae1b-ca2ae6513717"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.228837 4922 scope.go:117] "RemoveContainer" containerID="6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.249604 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4139-account-create-lssc6" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.257856 4922 scope.go:117] "RemoveContainer" containerID="5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.259552 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m67rx\" (UniqueName: \"kubernetes.io/projected/7253e3fb-15f2-40c8-886e-98730dd4279b-kube-api-access-m67rx\") pod \"nova-cell1-fcf1-account-create-cdp2t\" (UID: \"7253e3fb-15f2-40c8-886e-98730dd4279b\") " pod="openstack/nova-cell1-fcf1-account-create-cdp2t" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.259769 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05864898-e5b4-4306-ae1b-ca2ae6513717-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.362774 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m67rx\" (UniqueName: \"kubernetes.io/projected/7253e3fb-15f2-40c8-886e-98730dd4279b-kube-api-access-m67rx\") pod \"nova-cell1-fcf1-account-create-cdp2t\" (UID: \"7253e3fb-15f2-40c8-886e-98730dd4279b\") " pod="openstack/nova-cell1-fcf1-account-create-cdp2t" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.380956 4922 scope.go:117] "RemoveContainer" containerID="e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.381789 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m67rx\" (UniqueName: \"kubernetes.io/projected/7253e3fb-15f2-40c8-886e-98730dd4279b-kube-api-access-m67rx\") pod \"nova-cell1-fcf1-account-create-cdp2t\" (UID: \"7253e3fb-15f2-40c8-886e-98730dd4279b\") " pod="openstack/nova-cell1-fcf1-account-create-cdp2t" Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.384658 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": container with ID starting with e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791 not found: ID does not exist" containerID="e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.384841 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791"} err="failed to get container status \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": rpc error: code = NotFound desc = could not find container \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": container with ID starting with e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.384869 4922 scope.go:117] "RemoveContainer" containerID="e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.385958 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": container with ID starting with 
e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838 not found: ID does not exist" containerID="e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.385988 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838"} err="failed to get container status \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": rpc error: code = NotFound desc = could not find container \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": container with ID starting with e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386014 4922 scope.go:117] "RemoveContainer" containerID="6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.386230 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": container with ID starting with 6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a not found: ID does not exist" containerID="6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386246 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a"} err="failed to get container status \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": rpc error: code = NotFound desc = could not find container \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": container with ID starting with 6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386258 4922 scope.go:117] "RemoveContainer" containerID="5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" Sep 29 22:47:41 crc kubenswrapper[4922]: E0929 22:47:41.386473 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": container with ID starting with 5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9 not found: ID does not exist" containerID="5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386489 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9"} err="failed to get container status \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": rpc error: code = NotFound desc = could not find container \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": container with ID starting with 5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386500 4922 scope.go:117] "RemoveContainer" containerID="e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386647 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791"} err="failed to get container status \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": rpc error: code = NotFound desc = could not find container \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": container with ID starting with e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386663 4922 scope.go:117] "RemoveContainer" containerID="e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386871 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838"} err="failed to get container status \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": rpc error: code = NotFound desc = could not find container \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": container with ID starting with e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.386890 4922 scope.go:117] "RemoveContainer" containerID="6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.387044 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a"} err="failed to get container status \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": rpc error: code = NotFound desc = could not find container \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": container with ID starting with 6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.387062 4922 scope.go:117] "RemoveContainer" containerID="5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.387213 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9"} err="failed to get container status \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": rpc error: code = NotFound desc = could not find container \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": container with ID starting with 5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.387229 4922 scope.go:117] "RemoveContainer" containerID="e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.387358 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791"} err="failed to get container status \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": rpc error: code = NotFound desc = could not find container \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": container with ID starting with e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791 not found: ID does not exist" Sep 
29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.387374 4922 scope.go:117] "RemoveContainer" containerID="e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.395515 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838"} err="failed to get container status \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": rpc error: code = NotFound desc = could not find container \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": container with ID starting with e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.395561 4922 scope.go:117] "RemoveContainer" containerID="6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.396979 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a"} err="failed to get container status \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": rpc error: code = NotFound desc = could not find container \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": container with ID starting with 6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.397007 4922 scope.go:117] "RemoveContainer" containerID="5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.397363 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9"} err="failed to get container status \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": rpc error: code = NotFound desc = could not find container \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": container with ID starting with 5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.397377 4922 scope.go:117] "RemoveContainer" containerID="e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.400604 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791"} err="failed to get container status \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": rpc error: code = NotFound desc = could not find container \"e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791\": container with ID starting with e25505143517ce0aa6ce674301e18caf6a916506edf3db1bf15ae856f225a791 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.400627 4922 scope.go:117] "RemoveContainer" containerID="e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.404594 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838"} err="failed to get container status 
\"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": rpc error: code = NotFound desc = could not find container \"e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838\": container with ID starting with e5ffa0e1eaa4de19baf2e792c943616bcc5dcd491854a2199e3ffec2f0100838 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.404620 4922 scope.go:117] "RemoveContainer" containerID="6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.416663 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a"} err="failed to get container status \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": rpc error: code = NotFound desc = could not find container \"6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a\": container with ID starting with 6977bdf9ea341f59ac740bb1924f6be8f49cd4ba8b2e57ce867f148586de684a not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.416720 4922 scope.go:117] "RemoveContainer" containerID="5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.417133 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9"} err="failed to get container status \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": rpc error: code = NotFound desc = could not find container \"5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9\": container with ID starting with 5a4c746229c08dd8269c1355318d7dd88e0a61a5adfd9f4a4c92e188177fd7f9 not found: ID does not exist" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.439331 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.529756 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9bbc-account-create-jjb4q"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.564638 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.571333 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.595428 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.597668 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.599550 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.603778 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.617134 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.668584 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.668952 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-scripts\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.668982 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-run-httpd\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.669024 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.669202 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-config-data\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.669232 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-log-httpd\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.669279 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snq66\" (UniqueName: \"kubernetes.io/projected/fc07cfb4-7049-47d4-a401-510daad0fd48-kube-api-access-snq66\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: W0929 22:47:41.695737 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9ef1309_66cb_4726_b646_941bd87b27b1.slice/crio-69e913424b8f123496095d499cb5af3d34f2da21265fea104bc889bf5f2de494 WatchSource:0}: Error finding container 
69e913424b8f123496095d499cb5af3d34f2da21265fea104bc889bf5f2de494: Status 404 returned error can't find the container with id 69e913424b8f123496095d499cb5af3d34f2da21265fea104bc889bf5f2de494 Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.696119 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-4139-account-create-lssc6"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.779378 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snq66\" (UniqueName: \"kubernetes.io/projected/fc07cfb4-7049-47d4-a401-510daad0fd48-kube-api-access-snq66\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.779476 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.779500 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-scripts\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.779515 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-run-httpd\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.779547 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.779634 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-config-data\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.779657 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-log-httpd\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.780228 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-log-httpd\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.780312 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-run-httpd\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc 
kubenswrapper[4922]: I0929 22:47:41.784961 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.785539 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-config-data\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.787486 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.790164 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-scripts\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.797763 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snq66\" (UniqueName: \"kubernetes.io/projected/fc07cfb4-7049-47d4-a401-510daad0fd48-kube-api-access-snq66\") pod \"ceilometer-0\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " pod="openstack/ceilometer-0" Sep 29 22:47:41 crc kubenswrapper[4922]: W0929 22:47:41.888589 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7253e3fb_15f2_40c8_886e_98730dd4279b.slice/crio-d182bf557bcaa69d340ae6817305d1cce2983548dff2f698e78d2b46fa0916fb WatchSource:0}: Error finding container d182bf557bcaa69d340ae6817305d1cce2983548dff2f698e78d2b46fa0916fb: Status 404 returned error can't find the container with id d182bf557bcaa69d340ae6817305d1cce2983548dff2f698e78d2b46fa0916fb Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.890931 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-fcf1-account-create-cdp2t"] Sep 29 22:47:41 crc kubenswrapper[4922]: I0929 22:47:41.923384 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.177510 4922 generic.go:334] "Generic (PLEG): container finished" podID="fff35789-9cfe-47f4-8477-ddaf5caf85fc" containerID="a6f1bb89e5078fd01351e172f87b236110f6a224af2cfb4d3a86fea04668651f" exitCode=0 Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.177594 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9bbc-account-create-jjb4q" event={"ID":"fff35789-9cfe-47f4-8477-ddaf5caf85fc","Type":"ContainerDied","Data":"a6f1bb89e5078fd01351e172f87b236110f6a224af2cfb4d3a86fea04668651f"} Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.177867 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9bbc-account-create-jjb4q" event={"ID":"fff35789-9cfe-47f4-8477-ddaf5caf85fc","Type":"ContainerStarted","Data":"b7f17b09415e31740c49a97a157d51740e9cffaa271f379a008e5b32e26bc946"} Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.179798 4922 generic.go:334] "Generic (PLEG): container finished" podID="f9ef1309-66cb-4726-b646-941bd87b27b1" containerID="1e876adec5484e05b077c96b5b512d17c8e7fd01cafe21209613ed2494bca1f4" exitCode=0 Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.179856 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4139-account-create-lssc6" event={"ID":"f9ef1309-66cb-4726-b646-941bd87b27b1","Type":"ContainerDied","Data":"1e876adec5484e05b077c96b5b512d17c8e7fd01cafe21209613ed2494bca1f4"} Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.179878 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4139-account-create-lssc6" event={"ID":"f9ef1309-66cb-4726-b646-941bd87b27b1","Type":"ContainerStarted","Data":"69e913424b8f123496095d499cb5af3d34f2da21265fea104bc889bf5f2de494"} Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.182349 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" event={"ID":"7253e3fb-15f2-40c8-886e-98730dd4279b","Type":"ContainerStarted","Data":"7a62e125a44ebba0de06b0bbdee90bdf4a8ec082363e31330dd4e71e609bef7f"} Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.182370 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" event={"ID":"7253e3fb-15f2-40c8-886e-98730dd4279b","Type":"ContainerStarted","Data":"d182bf557bcaa69d340ae6817305d1cce2983548dff2f698e78d2b46fa0916fb"} Sep 29 22:47:42 crc kubenswrapper[4922]: W0929 22:47:42.369141 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc07cfb4_7049_47d4_a401_510daad0fd48.slice/crio-d95f8538a4c9d694688c0617c3d0f37edf4e6be6223e8f9af967e8960c56d0ab WatchSource:0}: Error finding container d95f8538a4c9d694688c0617c3d0f37edf4e6be6223e8f9af967e8960c56d0ab: Status 404 returned error can't find the container with id d95f8538a4c9d694688c0617c3d0f37edf4e6be6223e8f9af967e8960c56d0ab Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.385020 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:42 crc kubenswrapper[4922]: I0929 22:47:42.436645 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05864898-e5b4-4306-ae1b-ca2ae6513717" path="/var/lib/kubelet/pods/05864898-e5b4-4306-ae1b-ca2ae6513717/volumes" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.200599 4922 generic.go:334] "Generic (PLEG): container finished" 
podID="7253e3fb-15f2-40c8-886e-98730dd4279b" containerID="7a62e125a44ebba0de06b0bbdee90bdf4a8ec082363e31330dd4e71e609bef7f" exitCode=0 Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.200801 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" event={"ID":"7253e3fb-15f2-40c8-886e-98730dd4279b","Type":"ContainerDied","Data":"7a62e125a44ebba0de06b0bbdee90bdf4a8ec082363e31330dd4e71e609bef7f"} Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.203730 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerStarted","Data":"d95f8538a4c9d694688c0617c3d0f37edf4e6be6223e8f9af967e8960c56d0ab"} Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.711727 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.716863 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9bbc-account-create-jjb4q" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.721839 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-4139-account-create-lssc6" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.836296 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzqgc\" (UniqueName: \"kubernetes.io/projected/f9ef1309-66cb-4726-b646-941bd87b27b1-kube-api-access-fzqgc\") pod \"f9ef1309-66cb-4726-b646-941bd87b27b1\" (UID: \"f9ef1309-66cb-4726-b646-941bd87b27b1\") " Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.836599 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m67rx\" (UniqueName: \"kubernetes.io/projected/7253e3fb-15f2-40c8-886e-98730dd4279b-kube-api-access-m67rx\") pod \"7253e3fb-15f2-40c8-886e-98730dd4279b\" (UID: \"7253e3fb-15f2-40c8-886e-98730dd4279b\") " Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.836765 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg85j\" (UniqueName: \"kubernetes.io/projected/fff35789-9cfe-47f4-8477-ddaf5caf85fc-kube-api-access-rg85j\") pod \"fff35789-9cfe-47f4-8477-ddaf5caf85fc\" (UID: \"fff35789-9cfe-47f4-8477-ddaf5caf85fc\") " Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.839997 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9ef1309-66cb-4726-b646-941bd87b27b1-kube-api-access-fzqgc" (OuterVolumeSpecName: "kube-api-access-fzqgc") pod "f9ef1309-66cb-4726-b646-941bd87b27b1" (UID: "f9ef1309-66cb-4726-b646-941bd87b27b1"). InnerVolumeSpecName "kube-api-access-fzqgc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.840183 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fff35789-9cfe-47f4-8477-ddaf5caf85fc-kube-api-access-rg85j" (OuterVolumeSpecName: "kube-api-access-rg85j") pod "fff35789-9cfe-47f4-8477-ddaf5caf85fc" (UID: "fff35789-9cfe-47f4-8477-ddaf5caf85fc"). InnerVolumeSpecName "kube-api-access-rg85j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.841342 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7253e3fb-15f2-40c8-886e-98730dd4279b-kube-api-access-m67rx" (OuterVolumeSpecName: "kube-api-access-m67rx") pod "7253e3fb-15f2-40c8-886e-98730dd4279b" (UID: "7253e3fb-15f2-40c8-886e-98730dd4279b"). InnerVolumeSpecName "kube-api-access-m67rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.939082 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m67rx\" (UniqueName: \"kubernetes.io/projected/7253e3fb-15f2-40c8-886e-98730dd4279b-kube-api-access-m67rx\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.939630 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg85j\" (UniqueName: \"kubernetes.io/projected/fff35789-9cfe-47f4-8477-ddaf5caf85fc-kube-api-access-rg85j\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:43 crc kubenswrapper[4922]: I0929 22:47:43.939692 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzqgc\" (UniqueName: \"kubernetes.io/projected/f9ef1309-66cb-4726-b646-941bd87b27b1-kube-api-access-fzqgc\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.215266 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerStarted","Data":"777289c8ad33c345d58c269308b236c6b2c1b7a1eef3f28a860c31035681fdb0"} Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.215322 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerStarted","Data":"fc94a7c4276d7df08750075dbfb4c168de36d7c5d3fca5c895bc04e64232793b"} Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.216997 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9bbc-account-create-jjb4q" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.217023 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9bbc-account-create-jjb4q" event={"ID":"fff35789-9cfe-47f4-8477-ddaf5caf85fc","Type":"ContainerDied","Data":"b7f17b09415e31740c49a97a157d51740e9cffaa271f379a008e5b32e26bc946"} Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.217096 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7f17b09415e31740c49a97a157d51740e9cffaa271f379a008e5b32e26bc946" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.218843 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-4139-account-create-lssc6" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.218894 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-4139-account-create-lssc6" event={"ID":"f9ef1309-66cb-4726-b646-941bd87b27b1","Type":"ContainerDied","Data":"69e913424b8f123496095d499cb5af3d34f2da21265fea104bc889bf5f2de494"} Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.219080 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69e913424b8f123496095d499cb5af3d34f2da21265fea104bc889bf5f2de494" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.220435 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" event={"ID":"7253e3fb-15f2-40c8-886e-98730dd4279b","Type":"ContainerDied","Data":"d182bf557bcaa69d340ae6817305d1cce2983548dff2f698e78d2b46fa0916fb"} Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.220460 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d182bf557bcaa69d340ae6817305d1cce2983548dff2f698e78d2b46fa0916fb" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.220529 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fcf1-account-create-cdp2t" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.611480 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.611586 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.655575 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.676756 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.713012 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.713294 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.771113 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:44 crc kubenswrapper[4922]: I0929 22:47:44.772359 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:45 crc kubenswrapper[4922]: I0929 22:47:45.231763 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerStarted","Data":"08270f539002d69b60782589c13d804270a432ec875060d2d4ad0ffb8eca27be"} Sep 29 22:47:45 crc kubenswrapper[4922]: I0929 22:47:45.232268 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:45 crc kubenswrapper[4922]: I0929 22:47:45.232298 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 22:47:45 crc kubenswrapper[4922]: I0929 22:47:45.232309 4922 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:45 crc kubenswrapper[4922]: I0929 22:47:45.232317 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.014005 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lrzqc"] Sep 29 22:47:46 crc kubenswrapper[4922]: E0929 22:47:46.014599 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7253e3fb-15f2-40c8-886e-98730dd4279b" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.014619 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7253e3fb-15f2-40c8-886e-98730dd4279b" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: E0929 22:47:46.014642 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9ef1309-66cb-4726-b646-941bd87b27b1" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.014649 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9ef1309-66cb-4726-b646-941bd87b27b1" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: E0929 22:47:46.014663 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fff35789-9cfe-47f4-8477-ddaf5caf85fc" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.014668 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fff35789-9cfe-47f4-8477-ddaf5caf85fc" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.014833 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7253e3fb-15f2-40c8-886e-98730dd4279b" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.014848 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9ef1309-66cb-4726-b646-941bd87b27b1" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.014863 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fff35789-9cfe-47f4-8477-ddaf5caf85fc" containerName="mariadb-account-create" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.015387 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.018312 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.019107 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lqs8l" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.020870 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.026217 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lrzqc"] Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.080666 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-config-data\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.080712 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-scripts\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.080784 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljz8h\" (UniqueName: \"kubernetes.io/projected/94230f56-1036-4c04-8b74-b7138466df0c-kube-api-access-ljz8h\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.080816 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.182800 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-config-data\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.182868 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-scripts\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.182952 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljz8h\" (UniqueName: \"kubernetes.io/projected/94230f56-1036-4c04-8b74-b7138466df0c-kube-api-access-ljz8h\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: 
\"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.183015 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.186142 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.188065 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-scripts\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.190166 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-config-data\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.200703 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljz8h\" (UniqueName: \"kubernetes.io/projected/94230f56-1036-4c04-8b74-b7138466df0c-kube-api-access-ljz8h\") pod \"nova-cell0-conductor-db-sync-lrzqc\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.372995 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:47:46 crc kubenswrapper[4922]: I0929 22:47:46.919974 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lrzqc"] Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.200574 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.247034 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.267623 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerStarted","Data":"49fde7f0a8e5b771aec54162d0fa2abdfa3f49b4517757e4c2fe185467633406"} Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.268953 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.270336 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" event={"ID":"94230f56-1036-4c04-8b74-b7138466df0c","Type":"ContainerStarted","Data":"46a3d2f1b7fa0e1fb2453bbab5a199244d08cc1b66a0200814bf5b091cfd9785"} Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.270377 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.270674 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.297507 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.51159896 podStartE2EDuration="6.297483919s" podCreationTimestamp="2025-09-29 22:47:41 +0000 UTC" firstStartedPulling="2025-09-29 22:47:42.372041747 +0000 UTC m=+1266.682330600" lastFinishedPulling="2025-09-29 22:47:46.157926746 +0000 UTC m=+1270.468215559" observedRunningTime="2025-09-29 22:47:47.288200719 +0000 UTC m=+1271.598489522" watchObservedRunningTime="2025-09-29 22:47:47.297483919 +0000 UTC m=+1271.607772732" Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.499809 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 22:47:47 crc kubenswrapper[4922]: I0929 22:47:47.530091 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 22:47:48 crc kubenswrapper[4922]: I0929 22:47:48.175705 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:49 crc kubenswrapper[4922]: I0929 22:47:49.288250 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="ceilometer-central-agent" containerID="cri-o://fc94a7c4276d7df08750075dbfb4c168de36d7c5d3fca5c895bc04e64232793b" gracePeriod=30 Sep 29 22:47:49 crc kubenswrapper[4922]: I0929 22:47:49.289187 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="proxy-httpd" containerID="cri-o://49fde7f0a8e5b771aec54162d0fa2abdfa3f49b4517757e4c2fe185467633406" gracePeriod=30 Sep 29 22:47:49 crc kubenswrapper[4922]: I0929 
22:47:49.289218 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="sg-core" containerID="cri-o://08270f539002d69b60782589c13d804270a432ec875060d2d4ad0ffb8eca27be" gracePeriod=30 Sep 29 22:47:49 crc kubenswrapper[4922]: I0929 22:47:49.289231 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="ceilometer-notification-agent" containerID="cri-o://777289c8ad33c345d58c269308b236c6b2c1b7a1eef3f28a860c31035681fdb0" gracePeriod=30 Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.301671 4922 generic.go:334] "Generic (PLEG): container finished" podID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerID="49fde7f0a8e5b771aec54162d0fa2abdfa3f49b4517757e4c2fe185467633406" exitCode=0 Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.302131 4922 generic.go:334] "Generic (PLEG): container finished" podID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerID="08270f539002d69b60782589c13d804270a432ec875060d2d4ad0ffb8eca27be" exitCode=2 Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.302140 4922 generic.go:334] "Generic (PLEG): container finished" podID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerID="777289c8ad33c345d58c269308b236c6b2c1b7a1eef3f28a860c31035681fdb0" exitCode=0 Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.302149 4922 generic.go:334] "Generic (PLEG): container finished" podID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerID="fc94a7c4276d7df08750075dbfb4c168de36d7c5d3fca5c895bc04e64232793b" exitCode=0 Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.301751 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerDied","Data":"49fde7f0a8e5b771aec54162d0fa2abdfa3f49b4517757e4c2fe185467633406"} Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.302183 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerDied","Data":"08270f539002d69b60782589c13d804270a432ec875060d2d4ad0ffb8eca27be"} Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.302197 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerDied","Data":"777289c8ad33c345d58c269308b236c6b2c1b7a1eef3f28a860c31035681fdb0"} Sep 29 22:47:50 crc kubenswrapper[4922]: I0929 22:47:50.302206 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerDied","Data":"fc94a7c4276d7df08750075dbfb4c168de36d7c5d3fca5c895bc04e64232793b"} Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.629058 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.661789 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-log-httpd\") pod \"fc07cfb4-7049-47d4-a401-510daad0fd48\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.661906 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-config-data\") pod \"fc07cfb4-7049-47d4-a401-510daad0fd48\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.661937 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-combined-ca-bundle\") pod \"fc07cfb4-7049-47d4-a401-510daad0fd48\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.662029 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snq66\" (UniqueName: \"kubernetes.io/projected/fc07cfb4-7049-47d4-a401-510daad0fd48-kube-api-access-snq66\") pod \"fc07cfb4-7049-47d4-a401-510daad0fd48\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.662050 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-run-httpd\") pod \"fc07cfb4-7049-47d4-a401-510daad0fd48\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.662078 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-sg-core-conf-yaml\") pod \"fc07cfb4-7049-47d4-a401-510daad0fd48\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.662117 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-scripts\") pod \"fc07cfb4-7049-47d4-a401-510daad0fd48\" (UID: \"fc07cfb4-7049-47d4-a401-510daad0fd48\") " Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.665479 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fc07cfb4-7049-47d4-a401-510daad0fd48" (UID: "fc07cfb4-7049-47d4-a401-510daad0fd48"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.665848 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fc07cfb4-7049-47d4-a401-510daad0fd48" (UID: "fc07cfb4-7049-47d4-a401-510daad0fd48"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.666938 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-scripts" (OuterVolumeSpecName: "scripts") pod "fc07cfb4-7049-47d4-a401-510daad0fd48" (UID: "fc07cfb4-7049-47d4-a401-510daad0fd48"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.671130 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc07cfb4-7049-47d4-a401-510daad0fd48-kube-api-access-snq66" (OuterVolumeSpecName: "kube-api-access-snq66") pod "fc07cfb4-7049-47d4-a401-510daad0fd48" (UID: "fc07cfb4-7049-47d4-a401-510daad0fd48"). InnerVolumeSpecName "kube-api-access-snq66". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.713048 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fc07cfb4-7049-47d4-a401-510daad0fd48" (UID: "fc07cfb4-7049-47d4-a401-510daad0fd48"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.766242 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snq66\" (UniqueName: \"kubernetes.io/projected/fc07cfb4-7049-47d4-a401-510daad0fd48-kube-api-access-snq66\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.766290 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.766310 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.766328 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.766346 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc07cfb4-7049-47d4-a401-510daad0fd48-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.772491 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc07cfb4-7049-47d4-a401-510daad0fd48" (UID: "fc07cfb4-7049-47d4-a401-510daad0fd48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.786368 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-config-data" (OuterVolumeSpecName: "config-data") pod "fc07cfb4-7049-47d4-a401-510daad0fd48" (UID: "fc07cfb4-7049-47d4-a401-510daad0fd48"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.867158 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:53 crc kubenswrapper[4922]: I0929 22:47:53.867182 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc07cfb4-7049-47d4-a401-510daad0fd48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.351147 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" event={"ID":"94230f56-1036-4c04-8b74-b7138466df0c","Type":"ContainerStarted","Data":"0eefb0b1bba7e0faa2f93c51c1ba92a9d77c634ccf72229caec29707da0e3782"} Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.357073 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fc07cfb4-7049-47d4-a401-510daad0fd48","Type":"ContainerDied","Data":"d95f8538a4c9d694688c0617c3d0f37edf4e6be6223e8f9af967e8960c56d0ab"} Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.357160 4922 scope.go:117] "RemoveContainer" containerID="49fde7f0a8e5b771aec54162d0fa2abdfa3f49b4517757e4c2fe185467633406" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.357181 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.375695 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" podStartSLOduration=2.817131078 podStartE2EDuration="9.375665765s" podCreationTimestamp="2025-09-29 22:47:45 +0000 UTC" firstStartedPulling="2025-09-29 22:47:46.940098032 +0000 UTC m=+1271.250386845" lastFinishedPulling="2025-09-29 22:47:53.498632679 +0000 UTC m=+1277.808921532" observedRunningTime="2025-09-29 22:47:54.375258405 +0000 UTC m=+1278.685547308" watchObservedRunningTime="2025-09-29 22:47:54.375665765 +0000 UTC m=+1278.685954658" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.398785 4922 scope.go:117] "RemoveContainer" containerID="08270f539002d69b60782589c13d804270a432ec875060d2d4ad0ffb8eca27be" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471075 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471127 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471152 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471581 4922 scope.go:117] "RemoveContainer" containerID="777289c8ad33c345d58c269308b236c6b2c1b7a1eef3f28a860c31035681fdb0" Sep 29 22:47:54 crc kubenswrapper[4922]: E0929 22:47:54.471639 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="sg-core" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471660 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="sg-core" Sep 29 22:47:54 crc kubenswrapper[4922]: E0929 22:47:54.471688 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" 
containerName="ceilometer-notification-agent" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471700 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="ceilometer-notification-agent" Sep 29 22:47:54 crc kubenswrapper[4922]: E0929 22:47:54.471730 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="proxy-httpd" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471740 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="proxy-httpd" Sep 29 22:47:54 crc kubenswrapper[4922]: E0929 22:47:54.471757 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="ceilometer-central-agent" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.471768 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="ceilometer-central-agent" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.472033 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="sg-core" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.472053 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="ceilometer-central-agent" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.472068 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="proxy-httpd" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.472107 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" containerName="ceilometer-notification-agent" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.476755 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.476914 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.480269 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.481698 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.533544 4922 scope.go:117] "RemoveContainer" containerID="fc94a7c4276d7df08750075dbfb4c168de36d7c5d3fca5c895bc04e64232793b" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.582124 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-scripts\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.582170 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-log-httpd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.582465 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-run-httpd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.582566 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.582610 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.582902 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmggd\" (UniqueName: \"kubernetes.io/projected/1d483395-0697-412a-8072-897e32b7d492-kube-api-access-mmggd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.582958 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-config-data\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.684989 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-run-httpd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc 
kubenswrapper[4922]: I0929 22:47:54.685040 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.685062 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.685139 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmggd\" (UniqueName: \"kubernetes.io/projected/1d483395-0697-412a-8072-897e32b7d492-kube-api-access-mmggd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.685160 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-config-data\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.685195 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-scripts\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.685215 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-log-httpd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.685474 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-run-httpd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.685653 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-log-httpd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.690668 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.690740 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-scripts\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.695924 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.704968 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmggd\" (UniqueName: \"kubernetes.io/projected/1d483395-0697-412a-8072-897e32b7d492-kube-api-access-mmggd\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.705972 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-config-data\") pod \"ceilometer-0\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " pod="openstack/ceilometer-0" Sep 29 22:47:54 crc kubenswrapper[4922]: I0929 22:47:54.830393 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:47:55 crc kubenswrapper[4922]: I0929 22:47:55.140533 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:47:55 crc kubenswrapper[4922]: I0929 22:47:55.369165 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerStarted","Data":"458ad1f893996fa345a319b66b4419812d114eacbd0aa4f879e5ee9f86616f14"} Sep 29 22:47:56 crc kubenswrapper[4922]: I0929 22:47:56.379718 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerStarted","Data":"8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93"} Sep 29 22:47:56 crc kubenswrapper[4922]: I0929 22:47:56.449026 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc07cfb4-7049-47d4-a401-510daad0fd48" path="/var/lib/kubelet/pods/fc07cfb4-7049-47d4-a401-510daad0fd48/volumes" Sep 29 22:47:57 crc kubenswrapper[4922]: I0929 22:47:57.392476 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerStarted","Data":"c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a"} Sep 29 22:47:57 crc kubenswrapper[4922]: I0929 22:47:57.393110 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerStarted","Data":"e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11"} Sep 29 22:47:59 crc kubenswrapper[4922]: I0929 22:47:59.417950 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerStarted","Data":"008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259"} Sep 29 22:47:59 crc kubenswrapper[4922]: I0929 22:47:59.418632 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:47:59 crc kubenswrapper[4922]: I0929 22:47:59.457663 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.9455807250000001 podStartE2EDuration="5.457639768s" podCreationTimestamp="2025-09-29 22:47:54 +0000 UTC" firstStartedPulling="2025-09-29 22:47:55.148169561 
+0000 UTC m=+1279.458458374" lastFinishedPulling="2025-09-29 22:47:58.660228604 +0000 UTC m=+1282.970517417" observedRunningTime="2025-09-29 22:47:59.444039421 +0000 UTC m=+1283.754328254" watchObservedRunningTime="2025-09-29 22:47:59.457639768 +0000 UTC m=+1283.767928591" Sep 29 22:48:05 crc kubenswrapper[4922]: I0929 22:48:05.494217 4922 generic.go:334] "Generic (PLEG): container finished" podID="94230f56-1036-4c04-8b74-b7138466df0c" containerID="0eefb0b1bba7e0faa2f93c51c1ba92a9d77c634ccf72229caec29707da0e3782" exitCode=0 Sep 29 22:48:05 crc kubenswrapper[4922]: I0929 22:48:05.494817 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" event={"ID":"94230f56-1036-4c04-8b74-b7138466df0c","Type":"ContainerDied","Data":"0eefb0b1bba7e0faa2f93c51c1ba92a9d77c634ccf72229caec29707da0e3782"} Sep 29 22:48:06 crc kubenswrapper[4922]: I0929 22:48:06.878116 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.037504 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-scripts\") pod \"94230f56-1036-4c04-8b74-b7138466df0c\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.037565 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-config-data\") pod \"94230f56-1036-4c04-8b74-b7138466df0c\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.037716 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljz8h\" (UniqueName: \"kubernetes.io/projected/94230f56-1036-4c04-8b74-b7138466df0c-kube-api-access-ljz8h\") pod \"94230f56-1036-4c04-8b74-b7138466df0c\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.037782 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-combined-ca-bundle\") pod \"94230f56-1036-4c04-8b74-b7138466df0c\" (UID: \"94230f56-1036-4c04-8b74-b7138466df0c\") " Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.045459 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-scripts" (OuterVolumeSpecName: "scripts") pod "94230f56-1036-4c04-8b74-b7138466df0c" (UID: "94230f56-1036-4c04-8b74-b7138466df0c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.045655 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94230f56-1036-4c04-8b74-b7138466df0c-kube-api-access-ljz8h" (OuterVolumeSpecName: "kube-api-access-ljz8h") pod "94230f56-1036-4c04-8b74-b7138466df0c" (UID: "94230f56-1036-4c04-8b74-b7138466df0c"). InnerVolumeSpecName "kube-api-access-ljz8h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.067114 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94230f56-1036-4c04-8b74-b7138466df0c" (UID: "94230f56-1036-4c04-8b74-b7138466df0c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.089258 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-config-data" (OuterVolumeSpecName: "config-data") pod "94230f56-1036-4c04-8b74-b7138466df0c" (UID: "94230f56-1036-4c04-8b74-b7138466df0c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.139475 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.139500 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.139513 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljz8h\" (UniqueName: \"kubernetes.io/projected/94230f56-1036-4c04-8b74-b7138466df0c-kube-api-access-ljz8h\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.139526 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94230f56-1036-4c04-8b74-b7138466df0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.539983 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" event={"ID":"94230f56-1036-4c04-8b74-b7138466df0c","Type":"ContainerDied","Data":"46a3d2f1b7fa0e1fb2453bbab5a199244d08cc1b66a0200814bf5b091cfd9785"} Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.540044 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46a3d2f1b7fa0e1fb2453bbab5a199244d08cc1b66a0200814bf5b091cfd9785" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.540115 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-lrzqc" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.631681 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 22:48:07 crc kubenswrapper[4922]: E0929 22:48:07.637221 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94230f56-1036-4c04-8b74-b7138466df0c" containerName="nova-cell0-conductor-db-sync" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.637268 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="94230f56-1036-4c04-8b74-b7138466df0c" containerName="nova-cell0-conductor-db-sync" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.643539 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="94230f56-1036-4c04-8b74-b7138466df0c" containerName="nova-cell0-conductor-db-sync" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.646737 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.658536 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.674842 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.675049 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lqs8l" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.768163 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kffb4\" (UniqueName: \"kubernetes.io/projected/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-kube-api-access-kffb4\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.768238 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.768335 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.870138 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.870442 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kffb4\" (UniqueName: \"kubernetes.io/projected/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-kube-api-access-kffb4\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: 
I0929 22:48:07.870608 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.876233 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.877265 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:07 crc kubenswrapper[4922]: I0929 22:48:07.905700 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kffb4\" (UniqueName: \"kubernetes.io/projected/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-kube-api-access-kffb4\") pod \"nova-cell0-conductor-0\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:08 crc kubenswrapper[4922]: I0929 22:48:08.009459 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:08 crc kubenswrapper[4922]: I0929 22:48:08.358954 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 22:48:08 crc kubenswrapper[4922]: W0929 22:48:08.363590 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a5d1af0_eb7d_46ad_b4f1_eceb10445896.slice/crio-1f236449c45f35491ce65fd93e932ac6e2fc60b779e62e134d7470e1d015108f WatchSource:0}: Error finding container 1f236449c45f35491ce65fd93e932ac6e2fc60b779e62e134d7470e1d015108f: Status 404 returned error can't find the container with id 1f236449c45f35491ce65fd93e932ac6e2fc60b779e62e134d7470e1d015108f Sep 29 22:48:08 crc kubenswrapper[4922]: I0929 22:48:08.552484 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0a5d1af0-eb7d-46ad-b4f1-eceb10445896","Type":"ContainerStarted","Data":"03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18"} Sep 29 22:48:08 crc kubenswrapper[4922]: I0929 22:48:08.552556 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0a5d1af0-eb7d-46ad-b4f1-eceb10445896","Type":"ContainerStarted","Data":"1f236449c45f35491ce65fd93e932ac6e2fc60b779e62e134d7470e1d015108f"} Sep 29 22:48:08 crc kubenswrapper[4922]: I0929 22:48:08.552587 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:08 crc kubenswrapper[4922]: I0929 22:48:08.577182 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.5771549459999998 podStartE2EDuration="1.577154946s" podCreationTimestamp="2025-09-29 22:48:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-29 22:48:08.571889615 +0000 UTC m=+1292.882178438" watchObservedRunningTime="2025-09-29 22:48:08.577154946 +0000 UTC m=+1292.887443759" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.059954 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.642187 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-mqg7f"] Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.644968 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.647335 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.648144 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.673454 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-mqg7f"] Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.712841 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-config-data\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.712919 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.712950 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fx2t\" (UniqueName: \"kubernetes.io/projected/c069ee2e-ba38-4d12-8090-81842b86051a-kube-api-access-5fx2t\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.713504 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-scripts\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.815124 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-scripts\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.815197 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-config-data\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " 
pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.815233 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.815257 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fx2t\" (UniqueName: \"kubernetes.io/projected/c069ee2e-ba38-4d12-8090-81842b86051a-kube-api-access-5fx2t\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.823677 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.827047 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-config-data\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.827487 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-scripts\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.847173 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.848743 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.855917 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.868150 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.878021 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fx2t\" (UniqueName: \"kubernetes.io/projected/c069ee2e-ba38-4d12-8090-81842b86051a-kube-api-access-5fx2t\") pod \"nova-cell0-cell-mapping-mqg7f\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.924299 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9fca9a1-0875-4ec4-a731-067e1a80273f-logs\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.924351 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-config-data\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.924373 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86hdz\" (UniqueName: \"kubernetes.io/projected/c9fca9a1-0875-4ec4-a731-067e1a80273f-kube-api-access-86hdz\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.924455 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.970504 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.971668 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.977662 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.983936 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:13 crc kubenswrapper[4922]: I0929 22:48:13.991090 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.035307 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bps4v\" (UniqueName: \"kubernetes.io/projected/6e877b64-fb68-493e-8d8a-a685dc71ef6f-kube-api-access-bps4v\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.035433 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-config-data\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.035468 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.035514 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9fca9a1-0875-4ec4-a731-067e1a80273f-logs\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.035541 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-config-data\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.035561 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86hdz\" (UniqueName: \"kubernetes.io/projected/c9fca9a1-0875-4ec4-a731-067e1a80273f-kube-api-access-86hdz\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.035649 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.038936 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9fca9a1-0875-4ec4-a731-067e1a80273f-logs\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.083149 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.114033 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86hdz\" (UniqueName: 
\"kubernetes.io/projected/c9fca9a1-0875-4ec4-a731-067e1a80273f-kube-api-access-86hdz\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.118877 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-config-data\") pod \"nova-api-0\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.123712 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.125427 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.147729 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.148753 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.149668 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.149717 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bps4v\" (UniqueName: \"kubernetes.io/projected/6e877b64-fb68-493e-8d8a-a685dc71ef6f-kube-api-access-bps4v\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.149770 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094ff0cc-5eda-4a89-96a7-e8067418c9e0-logs\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.149790 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-config-data\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.149808 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s96pr\" (UniqueName: \"kubernetes.io/projected/094ff0cc-5eda-4a89-96a7-e8067418c9e0-kube-api-access-s96pr\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.149830 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.149865 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-config-data\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.157786 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.160873 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-config-data\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.176749 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.177925 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.184158 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.190685 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.197342 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bps4v\" (UniqueName: \"kubernetes.io/projected/6e877b64-fb68-493e-8d8a-a685dc71ef6f-kube-api-access-bps4v\") pod \"nova-scheduler-0\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.210515 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-mqchm"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.211786 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.238412 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-mqchm"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251779 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251849 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251882 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-svc\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251905 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbv5b\" (UniqueName: \"kubernetes.io/projected/5f316176-d569-40e2-a666-06d83e6bb959-kube-api-access-cbv5b\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251924 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251954 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-config\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251969 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094ff0cc-5eda-4a89-96a7-e8067418c9e0-logs\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.251987 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-727rs\" (UniqueName: \"kubernetes.io/projected/a032eb9f-381d-4249-b7ca-2e627f961119-kube-api-access-727rs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.252008 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s96pr\" (UniqueName: 
\"kubernetes.io/projected/094ff0cc-5eda-4a89-96a7-e8067418c9e0-kube-api-access-s96pr\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.252045 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.252063 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-config-data\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.252084 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.252105 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.255161 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094ff0cc-5eda-4a89-96a7-e8067418c9e0-logs\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.257692 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.262748 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-config-data\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.278357 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s96pr\" (UniqueName: \"kubernetes.io/projected/094ff0cc-5eda-4a89-96a7-e8067418c9e0-kube-api-access-s96pr\") pod \"nova-metadata-0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.280254 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353221 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353283 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353305 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353368 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353443 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-svc\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353469 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbv5b\" (UniqueName: \"kubernetes.io/projected/5f316176-d569-40e2-a666-06d83e6bb959-kube-api-access-cbv5b\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353486 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353512 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-config\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.353530 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-727rs\" (UniqueName: \"kubernetes.io/projected/a032eb9f-381d-4249-b7ca-2e627f961119-kube-api-access-727rs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc 
kubenswrapper[4922]: I0929 22:48:14.354479 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.356291 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.356792 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-config\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.356823 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.357178 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-svc\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.360504 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.363194 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.364068 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.372187 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-727rs\" (UniqueName: \"kubernetes.io/projected/a032eb9f-381d-4249-b7ca-2e627f961119-kube-api-access-727rs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.373980 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbv5b\" (UniqueName: \"kubernetes.io/projected/5f316176-d569-40e2-a666-06d83e6bb959-kube-api-access-cbv5b\") pod \"dnsmasq-dns-865f5d856f-mqchm\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.536850 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.561118 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.571776 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.695849 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-mqg7f"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.781574 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-x2kgq"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.783742 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.789355 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.805522 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.829424 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-x2kgq"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.856342 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.865085 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.865138 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-config-data\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.865164 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-scripts\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.865205 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6p9f\" (UniqueName: \"kubernetes.io/projected/3a1d01b5-f345-4aa5-88b0-a64c534f661c-kube-api-access-w6p9f\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.917828 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.966572 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.966629 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-config-data\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.966659 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-scripts\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.966701 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6p9f\" (UniqueName: \"kubernetes.io/projected/3a1d01b5-f345-4aa5-88b0-a64c534f661c-kube-api-access-w6p9f\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.971513 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.973082 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-scripts\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.973553 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-config-data\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:14 crc kubenswrapper[4922]: I0929 22:48:14.984822 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6p9f\" (UniqueName: \"kubernetes.io/projected/3a1d01b5-f345-4aa5-88b0-a64c534f661c-kube-api-access-w6p9f\") pod \"nova-cell1-conductor-db-sync-x2kgq\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.108806 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.123511 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-mqchm"] Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.132592 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:15 crc kubenswrapper[4922]: W0929 22:48:15.139076 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f316176_d569_40e2_a666_06d83e6bb959.slice/crio-431509b486c53b1854dcfa581e39427e3b7603f91a1576c78f71218d8846586a WatchSource:0}: Error finding container 431509b486c53b1854dcfa581e39427e3b7603f91a1576c78f71218d8846586a: Status 404 returned error can't find the container with id 431509b486c53b1854dcfa581e39427e3b7603f91a1576c78f71218d8846586a Sep 29 22:48:15 crc kubenswrapper[4922]: W0929 22:48:15.142776 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod094ff0cc_5eda_4a89_96a7_e8067418c9e0.slice/crio-bb8c1bcaed35fd97a2f78381614012647bab59bac7012366e9a655df98a7bd90 WatchSource:0}: Error finding container bb8c1bcaed35fd97a2f78381614012647bab59bac7012366e9a655df98a7bd90: Status 404 returned error can't find the container with id bb8c1bcaed35fd97a2f78381614012647bab59bac7012366e9a655df98a7bd90 Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.288713 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.601886 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-x2kgq"] Sep 29 22:48:15 crc kubenswrapper[4922]: W0929 22:48:15.605273 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a1d01b5_f345_4aa5_88b0_a64c534f661c.slice/crio-3092f7451e6ff8d961b25cefd9b0c54cdcf2a036e2caba163fce3423ca9d50bf WatchSource:0}: Error finding container 3092f7451e6ff8d961b25cefd9b0c54cdcf2a036e2caba163fce3423ca9d50bf: Status 404 returned error can't find the container with id 3092f7451e6ff8d961b25cefd9b0c54cdcf2a036e2caba163fce3423ca9d50bf Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.668704 4922 generic.go:334] "Generic (PLEG): container finished" podID="5f316176-d569-40e2-a666-06d83e6bb959" containerID="7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e" exitCode=0 Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.668767 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" event={"ID":"5f316176-d569-40e2-a666-06d83e6bb959","Type":"ContainerDied","Data":"7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.668791 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" event={"ID":"5f316176-d569-40e2-a666-06d83e6bb959","Type":"ContainerStarted","Data":"431509b486c53b1854dcfa581e39427e3b7603f91a1576c78f71218d8846586a"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.679647 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" event={"ID":"3a1d01b5-f345-4aa5-88b0-a64c534f661c","Type":"ContainerStarted","Data":"3092f7451e6ff8d961b25cefd9b0c54cdcf2a036e2caba163fce3423ca9d50bf"} Sep 29 
22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.681593 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"094ff0cc-5eda-4a89-96a7-e8067418c9e0","Type":"ContainerStarted","Data":"bb8c1bcaed35fd97a2f78381614012647bab59bac7012366e9a655df98a7bd90"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.697418 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6e877b64-fb68-493e-8d8a-a685dc71ef6f","Type":"ContainerStarted","Data":"7f488c4f56bad144474ec11679406aba9830edbf6d2855e9658303d61bcae27a"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.704610 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9fca9a1-0875-4ec4-a731-067e1a80273f","Type":"ContainerStarted","Data":"e9566606dc36ba615cac66aee7601116803593c547c5032092099e7d027aae9a"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.708147 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a032eb9f-381d-4249-b7ca-2e627f961119","Type":"ContainerStarted","Data":"fe92aa566b934d5aa76d822ed36da2c794fd636e9b5e64e499a531a2f67b0e76"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.720096 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mqg7f" event={"ID":"c069ee2e-ba38-4d12-8090-81842b86051a","Type":"ContainerStarted","Data":"4ac9e5d78eb21ed1711732f74e2d50ed5359b5fd04777f879f901b6d931572dc"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.720150 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mqg7f" event={"ID":"c069ee2e-ba38-4d12-8090-81842b86051a","Type":"ContainerStarted","Data":"a1656826be20fe29912b31fc4d3c4dec3895a9a8063193a85a71296ec7508836"} Sep 29 22:48:15 crc kubenswrapper[4922]: I0929 22:48:15.743217 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-mqg7f" podStartSLOduration=2.7431987209999997 podStartE2EDuration="2.743198721s" podCreationTimestamp="2025-09-29 22:48:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:15.7342575 +0000 UTC m=+1300.044546323" watchObservedRunningTime="2025-09-29 22:48:15.743198721 +0000 UTC m=+1300.053487534" Sep 29 22:48:16 crc kubenswrapper[4922]: I0929 22:48:16.732067 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" event={"ID":"5f316176-d569-40e2-a666-06d83e6bb959","Type":"ContainerStarted","Data":"3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1"} Sep 29 22:48:16 crc kubenswrapper[4922]: I0929 22:48:16.733511 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:16 crc kubenswrapper[4922]: I0929 22:48:16.736214 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" event={"ID":"3a1d01b5-f345-4aa5-88b0-a64c534f661c","Type":"ContainerStarted","Data":"8a4f1a17cc7281e316fc945ca04d33391147a434777cd42f9f8fecaa94adbeb0"} Sep 29 22:48:16 crc kubenswrapper[4922]: I0929 22:48:16.760785 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" podStartSLOduration=2.760771431 podStartE2EDuration="2.760771431s" podCreationTimestamp="2025-09-29 22:48:14 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:16.754344382 +0000 UTC m=+1301.064633195" watchObservedRunningTime="2025-09-29 22:48:16.760771431 +0000 UTC m=+1301.071060244" Sep 29 22:48:16 crc kubenswrapper[4922]: I0929 22:48:16.777038 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" podStartSLOduration=2.777023872 podStartE2EDuration="2.777023872s" podCreationTimestamp="2025-09-29 22:48:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:16.769930407 +0000 UTC m=+1301.080219220" watchObservedRunningTime="2025-09-29 22:48:16.777023872 +0000 UTC m=+1301.087312685" Sep 29 22:48:17 crc kubenswrapper[4922]: I0929 22:48:17.881348 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:17 crc kubenswrapper[4922]: I0929 22:48:17.896506 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.757381 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"094ff0cc-5eda-4a89-96a7-e8067418c9e0","Type":"ContainerStarted","Data":"0905da41990a3b6e384c72d2a0386ef4d77931396bc0710a2e410cb7138434b4"} Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.757719 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"094ff0cc-5eda-4a89-96a7-e8067418c9e0","Type":"ContainerStarted","Data":"36936cb1aa467c910ae432fb924d1475da2b8ee186bceff2f106a66832818198"} Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.757672 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-log" containerID="cri-o://36936cb1aa467c910ae432fb924d1475da2b8ee186bceff2f106a66832818198" gracePeriod=30 Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.757874 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-metadata" containerID="cri-o://0905da41990a3b6e384c72d2a0386ef4d77931396bc0710a2e410cb7138434b4" gracePeriod=30 Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.761992 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6e877b64-fb68-493e-8d8a-a685dc71ef6f","Type":"ContainerStarted","Data":"0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2"} Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.765785 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9fca9a1-0875-4ec4-a731-067e1a80273f","Type":"ContainerStarted","Data":"a50969413376194c2021eca89e536c45a1634321210da48c9844da0777618af3"} Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.765810 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9fca9a1-0875-4ec4-a731-067e1a80273f","Type":"ContainerStarted","Data":"a397940d0aea728b384bcce5ef5e521e314da1dca2a049bc33ac790cfd514f8b"} Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.769150 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"a032eb9f-381d-4249-b7ca-2e627f961119","Type":"ContainerStarted","Data":"48508a82337e926742cebbd69b7faec4f8934755f4c3a2de0b0d1596050a5f5a"} Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.769830 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a032eb9f-381d-4249-b7ca-2e627f961119" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://48508a82337e926742cebbd69b7faec4f8934755f4c3a2de0b0d1596050a5f5a" gracePeriod=30 Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.778942 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.7889193300000001 podStartE2EDuration="4.77893051s" podCreationTimestamp="2025-09-29 22:48:14 +0000 UTC" firstStartedPulling="2025-09-29 22:48:15.147354536 +0000 UTC m=+1299.457643349" lastFinishedPulling="2025-09-29 22:48:18.137365716 +0000 UTC m=+1302.447654529" observedRunningTime="2025-09-29 22:48:18.773606078 +0000 UTC m=+1303.083894881" watchObservedRunningTime="2025-09-29 22:48:18.77893051 +0000 UTC m=+1303.089219323" Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.799988 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.522164872 podStartE2EDuration="5.799966669s" podCreationTimestamp="2025-09-29 22:48:13 +0000 UTC" firstStartedPulling="2025-09-29 22:48:14.842675332 +0000 UTC m=+1299.152964145" lastFinishedPulling="2025-09-29 22:48:18.120477119 +0000 UTC m=+1302.430765942" observedRunningTime="2025-09-29 22:48:18.791110601 +0000 UTC m=+1303.101399404" watchObservedRunningTime="2025-09-29 22:48:18.799966669 +0000 UTC m=+1303.110255482" Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.831357 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.024296712 podStartE2EDuration="4.831298823s" podCreationTimestamp="2025-09-29 22:48:14 +0000 UTC" firstStartedPulling="2025-09-29 22:48:15.31718961 +0000 UTC m=+1299.627478423" lastFinishedPulling="2025-09-29 22:48:18.124191711 +0000 UTC m=+1302.434480534" observedRunningTime="2025-09-29 22:48:18.815727799 +0000 UTC m=+1303.126016612" watchObservedRunningTime="2025-09-29 22:48:18.831298823 +0000 UTC m=+1303.141587636" Sep 29 22:48:18 crc kubenswrapper[4922]: I0929 22:48:18.836090 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.654712746 podStartE2EDuration="5.836071151s" podCreationTimestamp="2025-09-29 22:48:13 +0000 UTC" firstStartedPulling="2025-09-29 22:48:14.937595356 +0000 UTC m=+1299.247884169" lastFinishedPulling="2025-09-29 22:48:18.118953761 +0000 UTC m=+1302.429242574" observedRunningTime="2025-09-29 22:48:18.831911088 +0000 UTC m=+1303.142199911" watchObservedRunningTime="2025-09-29 22:48:18.836071151 +0000 UTC m=+1303.146359964" Sep 29 22:48:19 crc kubenswrapper[4922]: I0929 22:48:19.364524 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 22:48:19 crc kubenswrapper[4922]: I0929 22:48:19.537821 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 22:48:19 crc kubenswrapper[4922]: I0929 22:48:19.537887 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 22:48:19 crc kubenswrapper[4922]: I0929 22:48:19.562529 4922 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:19 crc kubenswrapper[4922]: I0929 22:48:19.779805 4922 generic.go:334] "Generic (PLEG): container finished" podID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerID="36936cb1aa467c910ae432fb924d1475da2b8ee186bceff2f106a66832818198" exitCode=143 Sep 29 22:48:19 crc kubenswrapper[4922]: I0929 22:48:19.779929 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"094ff0cc-5eda-4a89-96a7-e8067418c9e0","Type":"ContainerDied","Data":"36936cb1aa467c910ae432fb924d1475da2b8ee186bceff2f106a66832818198"} Sep 29 22:48:22 crc kubenswrapper[4922]: I0929 22:48:22.825318 4922 generic.go:334] "Generic (PLEG): container finished" podID="3a1d01b5-f345-4aa5-88b0-a64c534f661c" containerID="8a4f1a17cc7281e316fc945ca04d33391147a434777cd42f9f8fecaa94adbeb0" exitCode=0 Sep 29 22:48:22 crc kubenswrapper[4922]: I0929 22:48:22.825425 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" event={"ID":"3a1d01b5-f345-4aa5-88b0-a64c534f661c","Type":"ContainerDied","Data":"8a4f1a17cc7281e316fc945ca04d33391147a434777cd42f9f8fecaa94adbeb0"} Sep 29 22:48:23 crc kubenswrapper[4922]: I0929 22:48:23.841901 4922 generic.go:334] "Generic (PLEG): container finished" podID="c069ee2e-ba38-4d12-8090-81842b86051a" containerID="4ac9e5d78eb21ed1711732f74e2d50ed5359b5fd04777f879f901b6d931572dc" exitCode=0 Sep 29 22:48:23 crc kubenswrapper[4922]: I0929 22:48:23.842123 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mqg7f" event={"ID":"c069ee2e-ba38-4d12-8090-81842b86051a","Type":"ContainerDied","Data":"4ac9e5d78eb21ed1711732f74e2d50ed5359b5fd04777f879f901b6d931572dc"} Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.282238 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.282628 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.364444 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.369575 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.446186 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.497002 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-config-data\") pod \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.497084 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-combined-ca-bundle\") pod \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.497119 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6p9f\" (UniqueName: \"kubernetes.io/projected/3a1d01b5-f345-4aa5-88b0-a64c534f661c-kube-api-access-w6p9f\") pod \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.497373 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-scripts\") pod \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\" (UID: \"3a1d01b5-f345-4aa5-88b0-a64c534f661c\") " Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.504931 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a1d01b5-f345-4aa5-88b0-a64c534f661c-kube-api-access-w6p9f" (OuterVolumeSpecName: "kube-api-access-w6p9f") pod "3a1d01b5-f345-4aa5-88b0-a64c534f661c" (UID: "3a1d01b5-f345-4aa5-88b0-a64c534f661c"). InnerVolumeSpecName "kube-api-access-w6p9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.517143 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-scripts" (OuterVolumeSpecName: "scripts") pod "3a1d01b5-f345-4aa5-88b0-a64c534f661c" (UID: "3a1d01b5-f345-4aa5-88b0-a64c534f661c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.537252 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-config-data" (OuterVolumeSpecName: "config-data") pod "3a1d01b5-f345-4aa5-88b0-a64c534f661c" (UID: "3a1d01b5-f345-4aa5-88b0-a64c534f661c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.546339 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a1d01b5-f345-4aa5-88b0-a64c534f661c" (UID: "3a1d01b5-f345-4aa5-88b0-a64c534f661c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.573630 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.599360 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.599402 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.599412 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a1d01b5-f345-4aa5-88b0-a64c534f661c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.599423 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6p9f\" (UniqueName: \"kubernetes.io/projected/3a1d01b5-f345-4aa5-88b0-a64c534f661c-kube-api-access-w6p9f\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.648156 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-njkfr"] Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.648839 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" podUID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerName="dnsmasq-dns" containerID="cri-o://9a8238d677313fdeb8aeaabe1fa9e8a3c61bbbb36229e993ecda3a2986dd92bf" gracePeriod=10 Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.845726 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.854419 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" event={"ID":"9cf777ed-7cc9-4035-8064-34cfb8b5af7d","Type":"ContainerDied","Data":"9a8238d677313fdeb8aeaabe1fa9e8a3c61bbbb36229e993ecda3a2986dd92bf"} Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.854427 4922 generic.go:334] "Generic (PLEG): container finished" podID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerID="9a8238d677313fdeb8aeaabe1fa9e8a3c61bbbb36229e993ecda3a2986dd92bf" exitCode=0 Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.858010 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" event={"ID":"3a1d01b5-f345-4aa5-88b0-a64c534f661c","Type":"ContainerDied","Data":"3092f7451e6ff8d961b25cefd9b0c54cdcf2a036e2caba163fce3423ca9d50bf"} Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.858031 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3092f7451e6ff8d961b25cefd9b0c54cdcf2a036e2caba163fce3423ca9d50bf" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.858095 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-x2kgq" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.921083 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.986416 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 22:48:24 crc kubenswrapper[4922]: E0929 22:48:24.987718 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a1d01b5-f345-4aa5-88b0-a64c534f661c" containerName="nova-cell1-conductor-db-sync" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.987769 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a1d01b5-f345-4aa5-88b0-a64c534f661c" containerName="nova-cell1-conductor-db-sync" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.988077 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a1d01b5-f345-4aa5-88b0-a64c534f661c" containerName="nova-cell1-conductor-db-sync" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.988717 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:24 crc kubenswrapper[4922]: I0929 22:48:24.991145 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.005696 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.110682 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.111027 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfqln\" (UniqueName: \"kubernetes.io/projected/bf1c4a85-458f-4412-ae77-af6d87370b62-kube-api-access-nfqln\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.111101 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.114114 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.212993 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-config\") pod \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.213052 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-nb\") pod \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.213222 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-svc\") pod \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.213302 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-sb\") pod \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.213351 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tvzs\" (UniqueName: \"kubernetes.io/projected/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-kube-api-access-4tvzs\") pod \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.213443 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-swift-storage-0\") pod \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\" (UID: \"9cf777ed-7cc9-4035-8064-34cfb8b5af7d\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.213782 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfqln\" (UniqueName: \"kubernetes.io/projected/bf1c4a85-458f-4412-ae77-af6d87370b62-kube-api-access-nfqln\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.213877 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.214000 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.223605 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.224591 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-kube-api-access-4tvzs" (OuterVolumeSpecName: "kube-api-access-4tvzs") pod "9cf777ed-7cc9-4035-8064-34cfb8b5af7d" (UID: "9cf777ed-7cc9-4035-8064-34cfb8b5af7d"). InnerVolumeSpecName "kube-api-access-4tvzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.224803 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.246056 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfqln\" (UniqueName: \"kubernetes.io/projected/bf1c4a85-458f-4412-ae77-af6d87370b62-kube-api-access-nfqln\") pod \"nova-cell1-conductor-0\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.275297 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-config" (OuterVolumeSpecName: "config") pod "9cf777ed-7cc9-4035-8064-34cfb8b5af7d" (UID: "9cf777ed-7cc9-4035-8064-34cfb8b5af7d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.298659 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9cf777ed-7cc9-4035-8064-34cfb8b5af7d" (UID: "9cf777ed-7cc9-4035-8064-34cfb8b5af7d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.307199 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9cf777ed-7cc9-4035-8064-34cfb8b5af7d" (UID: "9cf777ed-7cc9-4035-8064-34cfb8b5af7d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.316749 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.318889 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tvzs\" (UniqueName: \"kubernetes.io/projected/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-kube-api-access-4tvzs\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.318910 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.318919 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.318929 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.354341 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9cf777ed-7cc9-4035-8064-34cfb8b5af7d" (UID: "9cf777ed-7cc9-4035-8064-34cfb8b5af7d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.365622 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.366109 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.182:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.381746 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9cf777ed-7cc9-4035-8064-34cfb8b5af7d" (UID: "9cf777ed-7cc9-4035-8064-34cfb8b5af7d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.420221 4922 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.420257 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9cf777ed-7cc9-4035-8064-34cfb8b5af7d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.433891 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.625759 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fx2t\" (UniqueName: \"kubernetes.io/projected/c069ee2e-ba38-4d12-8090-81842b86051a-kube-api-access-5fx2t\") pod \"c069ee2e-ba38-4d12-8090-81842b86051a\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.625853 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-combined-ca-bundle\") pod \"c069ee2e-ba38-4d12-8090-81842b86051a\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.628528 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-config-data\") pod \"c069ee2e-ba38-4d12-8090-81842b86051a\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.629636 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-scripts\") pod \"c069ee2e-ba38-4d12-8090-81842b86051a\" (UID: \"c069ee2e-ba38-4d12-8090-81842b86051a\") " Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.652241 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-scripts" (OuterVolumeSpecName: "scripts") pod "c069ee2e-ba38-4d12-8090-81842b86051a" (UID: "c069ee2e-ba38-4d12-8090-81842b86051a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.652519 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c069ee2e-ba38-4d12-8090-81842b86051a-kube-api-access-5fx2t" (OuterVolumeSpecName: "kube-api-access-5fx2t") pod "c069ee2e-ba38-4d12-8090-81842b86051a" (UID: "c069ee2e-ba38-4d12-8090-81842b86051a"). InnerVolumeSpecName "kube-api-access-5fx2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.655660 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c069ee2e-ba38-4d12-8090-81842b86051a" (UID: "c069ee2e-ba38-4d12-8090-81842b86051a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.656746 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-config-data" (OuterVolumeSpecName: "config-data") pod "c069ee2e-ba38-4d12-8090-81842b86051a" (UID: "c069ee2e-ba38-4d12-8090-81842b86051a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.731821 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fx2t\" (UniqueName: \"kubernetes.io/projected/c069ee2e-ba38-4d12-8090-81842b86051a-kube-api-access-5fx2t\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.731849 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.731862 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.731873 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c069ee2e-ba38-4d12-8090-81842b86051a-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.818246 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.868508 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" event={"ID":"9cf777ed-7cc9-4035-8064-34cfb8b5af7d","Type":"ContainerDied","Data":"b43de1f36b88e11a998847ea123683f530f5261c3f9959929aeb2e340352c53f"} Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.868574 4922 scope.go:117] "RemoveContainer" containerID="9a8238d677313fdeb8aeaabe1fa9e8a3c61bbbb36229e993ecda3a2986dd92bf" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.868700 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-njkfr" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.872564 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-mqg7f" event={"ID":"c069ee2e-ba38-4d12-8090-81842b86051a","Type":"ContainerDied","Data":"a1656826be20fe29912b31fc4d3c4dec3895a9a8063193a85a71296ec7508836"} Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.872587 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1656826be20fe29912b31fc4d3c4dec3895a9a8063193a85a71296ec7508836" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.872626 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-mqg7f" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.886200 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bf1c4a85-458f-4412-ae77-af6d87370b62","Type":"ContainerStarted","Data":"5be570946029cf7c41009a06d35d671773276440570d4369de14f9fa8279aa60"} Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.889211 4922 scope.go:117] "RemoveContainer" containerID="6ebe0ac59859be244eb03a2e5ad1eb93a7f469e8ffaf1914c8e7baa6a65a0fec" Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.929210 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-njkfr"] Sep 29 22:48:25 crc kubenswrapper[4922]: I0929 22:48:25.943051 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-njkfr"] Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.164779 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.174767 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.175055 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-log" containerID="cri-o://a397940d0aea728b384bcce5ef5e521e314da1dca2a049bc33ac790cfd514f8b" gracePeriod=30 Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.175299 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-api" containerID="cri-o://a50969413376194c2021eca89e536c45a1634321210da48c9844da0777618af3" gracePeriod=30 Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.433158 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" path="/var/lib/kubelet/pods/9cf777ed-7cc9-4035-8064-34cfb8b5af7d/volumes" Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.904100 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bf1c4a85-458f-4412-ae77-af6d87370b62","Type":"ContainerStarted","Data":"f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198"} Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.904317 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.911006 4922 generic.go:334] "Generic (PLEG): container finished" podID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerID="a397940d0aea728b384bcce5ef5e521e314da1dca2a049bc33ac790cfd514f8b" exitCode=143 Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.911201 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9fca9a1-0875-4ec4-a731-067e1a80273f","Type":"ContainerDied","Data":"a397940d0aea728b384bcce5ef5e521e314da1dca2a049bc33ac790cfd514f8b"} Sep 29 22:48:26 crc kubenswrapper[4922]: I0929 22:48:26.911215 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="6e877b64-fb68-493e-8d8a-a685dc71ef6f" containerName="nova-scheduler-scheduler" containerID="cri-o://0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2" gracePeriod=30 Sep 29 22:48:26 crc kubenswrapper[4922]: 
I0929 22:48:26.931468 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.931441791 podStartE2EDuration="2.931441791s" podCreationTimestamp="2025-09-29 22:48:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:26.918529972 +0000 UTC m=+1311.228818785" watchObservedRunningTime="2025-09-29 22:48:26.931441791 +0000 UTC m=+1311.241730604" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.114469 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.114785 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="bd0ad759-f12c-454b-b9e3-c2a58ccf74e3" containerName="kube-state-metrics" containerID="cri-o://9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891" gracePeriod=30 Sep 29 22:48:29 crc kubenswrapper[4922]: E0929 22:48:29.375748 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:48:29 crc kubenswrapper[4922]: E0929 22:48:29.379308 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:48:29 crc kubenswrapper[4922]: E0929 22:48:29.384959 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:48:29 crc kubenswrapper[4922]: E0929 22:48:29.385027 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="6e877b64-fb68-493e-8d8a-a685dc71ef6f" containerName="nova-scheduler-scheduler" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.682671 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.815850 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhhp8\" (UniqueName: \"kubernetes.io/projected/bd0ad759-f12c-454b-b9e3-c2a58ccf74e3-kube-api-access-mhhp8\") pod \"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3\" (UID: \"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3\") " Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.821744 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd0ad759-f12c-454b-b9e3-c2a58ccf74e3-kube-api-access-mhhp8" (OuterVolumeSpecName: "kube-api-access-mhhp8") pod "bd0ad759-f12c-454b-b9e3-c2a58ccf74e3" (UID: "bd0ad759-f12c-454b-b9e3-c2a58ccf74e3"). InnerVolumeSpecName "kube-api-access-mhhp8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.919421 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhhp8\" (UniqueName: \"kubernetes.io/projected/bd0ad759-f12c-454b-b9e3-c2a58ccf74e3-kube-api-access-mhhp8\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.945079 4922 generic.go:334] "Generic (PLEG): container finished" podID="bd0ad759-f12c-454b-b9e3-c2a58ccf74e3" containerID="9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891" exitCode=2 Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.945160 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.945191 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3","Type":"ContainerDied","Data":"9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891"} Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.945223 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd0ad759-f12c-454b-b9e3-c2a58ccf74e3","Type":"ContainerDied","Data":"2edeecde746156b021aab777be394caf756d544c4d34cc0f3b8c5d2727961c89"} Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.945243 4922 scope.go:117] "RemoveContainer" containerID="9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.946931 4922 generic.go:334] "Generic (PLEG): container finished" podID="6e877b64-fb68-493e-8d8a-a685dc71ef6f" containerID="0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2" exitCode=0 Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.946974 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6e877b64-fb68-493e-8d8a-a685dc71ef6f","Type":"ContainerDied","Data":"0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2"} Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.979569 4922 scope.go:117] "RemoveContainer" containerID="9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.979683 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:48:29 crc kubenswrapper[4922]: E0929 22:48:29.979895 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891\": container with ID starting with 9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891 not found: ID does not exist" containerID="9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891" Sep 29 22:48:29 crc kubenswrapper[4922]: I0929 22:48:29.979935 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891"} err="failed to get container status \"9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891\": rpc error: code = NotFound desc = could not find container \"9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891\": container with ID starting with 9d95d21592141bb2cf1e757fe18d7572cba6221e3c659322c408dcf0f9fbc891 not found: ID does not exist" Sep 29 22:48:29 crc kubenswrapper[4922]: 
I0929 22:48:29.987475 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.010968 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:48:30 crc kubenswrapper[4922]: E0929 22:48:30.011594 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd0ad759-f12c-454b-b9e3-c2a58ccf74e3" containerName="kube-state-metrics" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.011621 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd0ad759-f12c-454b-b9e3-c2a58ccf74e3" containerName="kube-state-metrics" Sep 29 22:48:30 crc kubenswrapper[4922]: E0929 22:48:30.011641 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerName="init" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.011650 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerName="init" Sep 29 22:48:30 crc kubenswrapper[4922]: E0929 22:48:30.011671 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerName="dnsmasq-dns" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.011679 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerName="dnsmasq-dns" Sep 29 22:48:30 crc kubenswrapper[4922]: E0929 22:48:30.011715 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c069ee2e-ba38-4d12-8090-81842b86051a" containerName="nova-manage" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.011723 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c069ee2e-ba38-4d12-8090-81842b86051a" containerName="nova-manage" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.011948 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c069ee2e-ba38-4d12-8090-81842b86051a" containerName="nova-manage" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.011970 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf777ed-7cc9-4035-8064-34cfb8b5af7d" containerName="dnsmasq-dns" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.011993 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd0ad759-f12c-454b-b9e3-c2a58ccf74e3" containerName="kube-state-metrics" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.012787 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.014623 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.014962 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.020039 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.045538 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.128677 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.128786 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.128953 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.129044 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmlwv\" (UniqueName: \"kubernetes.io/projected/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-api-access-gmlwv\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.229958 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-config-data\") pod \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.230255 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-combined-ca-bundle\") pod \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.230421 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bps4v\" (UniqueName: \"kubernetes.io/projected/6e877b64-fb68-493e-8d8a-a685dc71ef6f-kube-api-access-bps4v\") pod \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\" (UID: \"6e877b64-fb68-493e-8d8a-a685dc71ef6f\") " Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.230706 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.230882 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 
22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.231036 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmlwv\" (UniqueName: \"kubernetes.io/projected/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-api-access-gmlwv\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.231188 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.235148 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e877b64-fb68-493e-8d8a-a685dc71ef6f-kube-api-access-bps4v" (OuterVolumeSpecName: "kube-api-access-bps4v") pod "6e877b64-fb68-493e-8d8a-a685dc71ef6f" (UID: "6e877b64-fb68-493e-8d8a-a685dc71ef6f"). InnerVolumeSpecName "kube-api-access-bps4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.235609 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.236510 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.237118 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.247565 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmlwv\" (UniqueName: \"kubernetes.io/projected/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-api-access-gmlwv\") pod \"kube-state-metrics-0\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.257899 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-config-data" (OuterVolumeSpecName: "config-data") pod "6e877b64-fb68-493e-8d8a-a685dc71ef6f" (UID: "6e877b64-fb68-493e-8d8a-a685dc71ef6f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.281508 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e877b64-fb68-493e-8d8a-a685dc71ef6f" (UID: "6e877b64-fb68-493e-8d8a-a685dc71ef6f"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.331952 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.331984 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e877b64-fb68-493e-8d8a-a685dc71ef6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.331995 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bps4v\" (UniqueName: \"kubernetes.io/projected/6e877b64-fb68-493e-8d8a-a685dc71ef6f-kube-api-access-bps4v\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.342316 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.351404 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.437945 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd0ad759-f12c-454b-b9e3-c2a58ccf74e3" path="/var/lib/kubelet/pods/bd0ad759-f12c-454b-b9e3-c2a58ccf74e3/volumes" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.838024 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:48:30 crc kubenswrapper[4922]: W0929 22:48:30.848434 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c5eef11_d4e0_43cd_b305_c427f85d173a.slice/crio-d454c4f40de7202a0be0dc9b199b91750d6b7978940474780a460ab06d38d087 WatchSource:0}: Error finding container d454c4f40de7202a0be0dc9b199b91750d6b7978940474780a460ab06d38d087: Status 404 returned error can't find the container with id d454c4f40de7202a0be0dc9b199b91750d6b7978940474780a460ab06d38d087 Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.960007 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1c5eef11-d4e0-43cd-b305-c427f85d173a","Type":"ContainerStarted","Data":"d454c4f40de7202a0be0dc9b199b91750d6b7978940474780a460ab06d38d087"} Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.963490 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.963486 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"6e877b64-fb68-493e-8d8a-a685dc71ef6f","Type":"ContainerDied","Data":"7f488c4f56bad144474ec11679406aba9830edbf6d2855e9658303d61bcae27a"} Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.963656 4922 scope.go:117] "RemoveContainer" containerID="0c3880a109f995535e3afd72fb036842e53fdc3dc4f0172ff0ad69a9ee2e70e2" Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.967320 4922 generic.go:334] "Generic (PLEG): container finished" podID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerID="a50969413376194c2021eca89e536c45a1634321210da48c9844da0777618af3" exitCode=0 Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.967636 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9fca9a1-0875-4ec4-a731-067e1a80273f","Type":"ContainerDied","Data":"a50969413376194c2021eca89e536c45a1634321210da48c9844da0777618af3"} Sep 29 22:48:30 crc kubenswrapper[4922]: I0929 22:48:30.990124 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.009691 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.015113 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:31 crc kubenswrapper[4922]: E0929 22:48:31.015550 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e877b64-fb68-493e-8d8a-a685dc71ef6f" containerName="nova-scheduler-scheduler" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.015563 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e877b64-fb68-493e-8d8a-a685dc71ef6f" containerName="nova-scheduler-scheduler" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.015781 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e877b64-fb68-493e-8d8a-a685dc71ef6f" containerName="nova-scheduler-scheduler" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.016478 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.021115 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.026535 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.044428 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.044685 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-central-agent" containerID="cri-o://8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93" gracePeriod=30 Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.045067 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="proxy-httpd" containerID="cri-o://008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259" gracePeriod=30 Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.045117 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="sg-core" containerID="cri-o://c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a" gracePeriod=30 Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.045148 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-notification-agent" containerID="cri-o://e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11" gracePeriod=30 Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.076517 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.146277 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkzff\" (UniqueName: \"kubernetes.io/projected/b6cf5218-da1f-4979-a8b2-1f4c49981307-kube-api-access-hkzff\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.146348 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-config-data\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.146445 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.247241 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86hdz\" (UniqueName: \"kubernetes.io/projected/c9fca9a1-0875-4ec4-a731-067e1a80273f-kube-api-access-86hdz\") pod \"c9fca9a1-0875-4ec4-a731-067e1a80273f\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.247372 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-config-data\") pod \"c9fca9a1-0875-4ec4-a731-067e1a80273f\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.247451 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-combined-ca-bundle\") pod \"c9fca9a1-0875-4ec4-a731-067e1a80273f\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.247691 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9fca9a1-0875-4ec4-a731-067e1a80273f-logs\") pod \"c9fca9a1-0875-4ec4-a731-067e1a80273f\" (UID: \"c9fca9a1-0875-4ec4-a731-067e1a80273f\") " Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.248121 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9fca9a1-0875-4ec4-a731-067e1a80273f-logs" (OuterVolumeSpecName: "logs") pod "c9fca9a1-0875-4ec4-a731-067e1a80273f" (UID: "c9fca9a1-0875-4ec4-a731-067e1a80273f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.248124 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkzff\" (UniqueName: \"kubernetes.io/projected/b6cf5218-da1f-4979-a8b2-1f4c49981307-kube-api-access-hkzff\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.248206 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-config-data\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.248301 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.248407 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9fca9a1-0875-4ec4-a731-067e1a80273f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.251048 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9fca9a1-0875-4ec4-a731-067e1a80273f-kube-api-access-86hdz" (OuterVolumeSpecName: "kube-api-access-86hdz") pod "c9fca9a1-0875-4ec4-a731-067e1a80273f" (UID: "c9fca9a1-0875-4ec4-a731-067e1a80273f"). InnerVolumeSpecName "kube-api-access-86hdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.254929 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.265074 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-config-data\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.268450 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkzff\" (UniqueName: \"kubernetes.io/projected/b6cf5218-da1f-4979-a8b2-1f4c49981307-kube-api-access-hkzff\") pod \"nova-scheduler-0\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.289088 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-config-data" (OuterVolumeSpecName: "config-data") pod "c9fca9a1-0875-4ec4-a731-067e1a80273f" (UID: "c9fca9a1-0875-4ec4-a731-067e1a80273f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.308662 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9fca9a1-0875-4ec4-a731-067e1a80273f" (UID: "c9fca9a1-0875-4ec4-a731-067e1a80273f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.342793 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.350846 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.350873 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9fca9a1-0875-4ec4-a731-067e1a80273f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.350885 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86hdz\" (UniqueName: \"kubernetes.io/projected/c9fca9a1-0875-4ec4-a731-067e1a80273f-kube-api-access-86hdz\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.827149 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:48:31 crc kubenswrapper[4922]: W0929 22:48:31.830757 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6cf5218_da1f_4979_a8b2_1f4c49981307.slice/crio-4a302d7aa49b23c112d5f0d3b45ca48784473e7bb1050667d946696a61c0c88b WatchSource:0}: Error finding container 4a302d7aa49b23c112d5f0d3b45ca48784473e7bb1050667d946696a61c0c88b: Status 404 returned error can't find the container with id 4a302d7aa49b23c112d5f0d3b45ca48784473e7bb1050667d946696a61c0c88b Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.983545 4922 generic.go:334] "Generic (PLEG): container finished" podID="1d483395-0697-412a-8072-897e32b7d492" containerID="008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259" exitCode=0 Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.983929 4922 generic.go:334] "Generic (PLEG): container finished" podID="1d483395-0697-412a-8072-897e32b7d492" containerID="c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a" exitCode=2 Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.983943 4922 generic.go:334] "Generic (PLEG): container finished" podID="1d483395-0697-412a-8072-897e32b7d492" containerID="8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93" exitCode=0 Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.983623 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerDied","Data":"008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259"} Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.984025 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerDied","Data":"c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a"} Sep 29 
22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.984043 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerDied","Data":"8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93"} Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.985654 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b6cf5218-da1f-4979-a8b2-1f4c49981307","Type":"ContainerStarted","Data":"4a302d7aa49b23c112d5f0d3b45ca48784473e7bb1050667d946696a61c0c88b"} Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.988973 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9fca9a1-0875-4ec4-a731-067e1a80273f","Type":"ContainerDied","Data":"e9566606dc36ba615cac66aee7601116803593c547c5032092099e7d027aae9a"} Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.989011 4922 scope.go:117] "RemoveContainer" containerID="a50969413376194c2021eca89e536c45a1634321210da48c9844da0777618af3" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.989029 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.992362 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1c5eef11-d4e0-43cd-b305-c427f85d173a","Type":"ContainerStarted","Data":"9d81b17eb6b803729c871b1ba518d9db5c6074ebf6d349cf4471bdfb5a4bac22"} Sep 29 22:48:31 crc kubenswrapper[4922]: I0929 22:48:31.992565 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.027526 4922 scope.go:117] "RemoveContainer" containerID="a397940d0aea728b384bcce5ef5e521e314da1dca2a049bc33ac790cfd514f8b" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.033593 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.669298094 podStartE2EDuration="3.03357566s" podCreationTimestamp="2025-09-29 22:48:29 +0000 UTC" firstStartedPulling="2025-09-29 22:48:30.850818082 +0000 UTC m=+1315.161106905" lastFinishedPulling="2025-09-29 22:48:31.215095648 +0000 UTC m=+1315.525384471" observedRunningTime="2025-09-29 22:48:32.019678097 +0000 UTC m=+1316.329966910" watchObservedRunningTime="2025-09-29 22:48:32.03357566 +0000 UTC m=+1316.343864493" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.055265 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.078330 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.092948 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:32 crc kubenswrapper[4922]: E0929 22:48:32.093363 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-log" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.093380 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-log" Sep 29 22:48:32 crc kubenswrapper[4922]: E0929 22:48:32.093408 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-api" Sep 29 22:48:32 
crc kubenswrapper[4922]: I0929 22:48:32.093415 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-api" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.093602 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-api" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.093627 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" containerName="nova-api-log" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.094547 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.094625 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.099575 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.271185 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-config-data\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.272690 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb78l\" (UniqueName: \"kubernetes.io/projected/2db43b28-b025-4538-ab26-cda934938671-kube-api-access-cb78l\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.273222 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.273708 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db43b28-b025-4538-ab26-cda934938671-logs\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.375505 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.375805 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db43b28-b025-4538-ab26-cda934938671-logs\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.375955 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-config-data\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " 
pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.376024 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb78l\" (UniqueName: \"kubernetes.io/projected/2db43b28-b025-4538-ab26-cda934938671-kube-api-access-cb78l\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.376918 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db43b28-b025-4538-ab26-cda934938671-logs\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.381147 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-config-data\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.382656 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.393981 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb78l\" (UniqueName: \"kubernetes.io/projected/2db43b28-b025-4538-ab26-cda934938671-kube-api-access-cb78l\") pod \"nova-api-0\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.410827 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.450273 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e877b64-fb68-493e-8d8a-a685dc71ef6f" path="/var/lib/kubelet/pods/6e877b64-fb68-493e-8d8a-a685dc71ef6f/volumes" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.450908 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9fca9a1-0875-4ec4-a731-067e1a80273f" path="/var/lib/kubelet/pods/c9fca9a1-0875-4ec4-a731-067e1a80273f/volumes" Sep 29 22:48:32 crc kubenswrapper[4922]: I0929 22:48:32.930368 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:32 crc kubenswrapper[4922]: W0929 22:48:32.939063 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2db43b28_b025_4538_ab26_cda934938671.slice/crio-b108d88e8ec3802913563bfded99814b76421e56d2b9ba7a460026c5b7e47d3d WatchSource:0}: Error finding container b108d88e8ec3802913563bfded99814b76421e56d2b9ba7a460026c5b7e47d3d: Status 404 returned error can't find the container with id b108d88e8ec3802913563bfded99814b76421e56d2b9ba7a460026c5b7e47d3d Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.005760 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db43b28-b025-4538-ab26-cda934938671","Type":"ContainerStarted","Data":"b108d88e8ec3802913563bfded99814b76421e56d2b9ba7a460026c5b7e47d3d"} Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.010253 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b6cf5218-da1f-4979-a8b2-1f4c49981307","Type":"ContainerStarted","Data":"d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9"} Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.036700 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.036676613 podStartE2EDuration="3.036676613s" podCreationTimestamp="2025-09-29 22:48:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:33.028730026 +0000 UTC m=+1317.339018889" watchObservedRunningTime="2025-09-29 22:48:33.036676613 +0000 UTC m=+1317.346965436" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.475599 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.514972 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-scripts\") pod \"1d483395-0697-412a-8072-897e32b7d492\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.515088 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-run-httpd\") pod \"1d483395-0697-412a-8072-897e32b7d492\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.515127 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-log-httpd\") pod \"1d483395-0697-412a-8072-897e32b7d492\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.515189 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-config-data\") pod \"1d483395-0697-412a-8072-897e32b7d492\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.515263 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-combined-ca-bundle\") pod \"1d483395-0697-412a-8072-897e32b7d492\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.515338 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmggd\" (UniqueName: \"kubernetes.io/projected/1d483395-0697-412a-8072-897e32b7d492-kube-api-access-mmggd\") pod \"1d483395-0697-412a-8072-897e32b7d492\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.515381 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-sg-core-conf-yaml\") pod \"1d483395-0697-412a-8072-897e32b7d492\" (UID: \"1d483395-0697-412a-8072-897e32b7d492\") " Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.516261 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1d483395-0697-412a-8072-897e32b7d492" (UID: "1d483395-0697-412a-8072-897e32b7d492"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.516911 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1d483395-0697-412a-8072-897e32b7d492" (UID: "1d483395-0697-412a-8072-897e32b7d492"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.519949 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-scripts" (OuterVolumeSpecName: "scripts") pod "1d483395-0697-412a-8072-897e32b7d492" (UID: "1d483395-0697-412a-8072-897e32b7d492"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.522139 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d483395-0697-412a-8072-897e32b7d492-kube-api-access-mmggd" (OuterVolumeSpecName: "kube-api-access-mmggd") pod "1d483395-0697-412a-8072-897e32b7d492" (UID: "1d483395-0697-412a-8072-897e32b7d492"). InnerVolumeSpecName "kube-api-access-mmggd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.549965 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1d483395-0697-412a-8072-897e32b7d492" (UID: "1d483395-0697-412a-8072-897e32b7d492"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.604553 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-config-data" (OuterVolumeSpecName: "config-data") pod "1d483395-0697-412a-8072-897e32b7d492" (UID: "1d483395-0697-412a-8072-897e32b7d492"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.607293 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d483395-0697-412a-8072-897e32b7d492" (UID: "1d483395-0697-412a-8072-897e32b7d492"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.617955 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.617985 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.618001 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.618013 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d483395-0697-412a-8072-897e32b7d492-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.618025 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.618052 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d483395-0697-412a-8072-897e32b7d492-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:33 crc kubenswrapper[4922]: I0929 22:48:33.618066 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmggd\" (UniqueName: \"kubernetes.io/projected/1d483395-0697-412a-8072-897e32b7d492-kube-api-access-mmggd\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.023948 4922 generic.go:334] "Generic (PLEG): container finished" podID="1d483395-0697-412a-8072-897e32b7d492" containerID="e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11" exitCode=0 Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.024072 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerDied","Data":"e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11"} Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.024132 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1d483395-0697-412a-8072-897e32b7d492","Type":"ContainerDied","Data":"458ad1f893996fa345a319b66b4419812d114eacbd0aa4f879e5ee9f86616f14"} Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.024170 4922 scope.go:117] "RemoveContainer" containerID="008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.024367 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.030601 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db43b28-b025-4538-ab26-cda934938671","Type":"ContainerStarted","Data":"ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c"} Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.030632 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db43b28-b025-4538-ab26-cda934938671","Type":"ContainerStarted","Data":"60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b"} Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.057954 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.057926663 podStartE2EDuration="2.057926663s" podCreationTimestamp="2025-09-29 22:48:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:34.057609995 +0000 UTC m=+1318.367898838" watchObservedRunningTime="2025-09-29 22:48:34.057926663 +0000 UTC m=+1318.368215516" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.061709 4922 scope.go:117] "RemoveContainer" containerID="c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.109152 4922 scope.go:117] "RemoveContainer" containerID="e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.111645 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.142063 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.156951 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.157789 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="sg-core" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.157821 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="sg-core" Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.157870 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="proxy-httpd" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.157885 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="proxy-httpd" Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.157906 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-central-agent" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.157918 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-central-agent" Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.157934 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-notification-agent" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.157947 4922 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-notification-agent" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.158243 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-central-agent" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.158269 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="sg-core" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.158308 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="ceilometer-notification-agent" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.158331 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d483395-0697-412a-8072-897e32b7d492" containerName="proxy-httpd" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.160857 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.163104 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.163277 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.163482 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.166205 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.168746 4922 scope.go:117] "RemoveContainer" containerID="8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.195819 4922 scope.go:117] "RemoveContainer" containerID="008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259" Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.196142 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259\": container with ID starting with 008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259 not found: ID does not exist" containerID="008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.196171 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259"} err="failed to get container status \"008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259\": rpc error: code = NotFound desc = could not find container \"008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259\": container with ID starting with 008d61efbe6c583b5e0fa02a20cf5bc89a26b70e99777136db9a3e6bc2dea259 not found: ID does not exist" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.196190 4922 scope.go:117] "RemoveContainer" containerID="c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a" Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.196419 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a\": container with ID starting with c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a not found: ID does not exist" containerID="c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.196444 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a"} err="failed to get container status \"c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a\": rpc error: code = NotFound desc = could not find container \"c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a\": container with ID starting with c8abffecd8eeb7f96ccd1fd003536eb07b63f627f58318b391dd5a0c148f1a8a not found: ID does not exist" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.196459 4922 scope.go:117] "RemoveContainer" containerID="e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11" Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.196700 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11\": container with ID starting with e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11 not found: ID does not exist" containerID="e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.196722 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11"} err="failed to get container status \"e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11\": rpc error: code = NotFound desc = could not find container \"e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11\": container with ID starting with e9dad106c1001f6872023fd2609f6c72d7e93913abff4bfaa5d2d336521d2e11 not found: ID does not exist" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.196734 4922 scope.go:117] "RemoveContainer" containerID="8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93" Sep 29 22:48:34 crc kubenswrapper[4922]: E0929 22:48:34.196946 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93\": container with ID starting with 8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93 not found: ID does not exist" containerID="8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.196967 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93"} err="failed to get container status \"8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93\": rpc error: code = NotFound desc = could not find container \"8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93\": container with ID starting with 8cc1f1a3bfed8ed586df3bc9ae9f1905cefa604b8096fde1ef4b823844858b93 not found: ID does not exist" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234359 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-config-data\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234436 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234457 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95k24\" (UniqueName: \"kubernetes.io/projected/00e87e6a-cbc8-494c-989a-1465b0e31908-kube-api-access-95k24\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234531 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-log-httpd\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234552 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-run-httpd\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234574 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234647 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-scripts\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.234763 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.336866 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-log-httpd\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.336927 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-run-httpd\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 
22:48:34.336973 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.337041 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-scripts\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.337151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.337230 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-config-data\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.337319 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.337352 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95k24\" (UniqueName: \"kubernetes.io/projected/00e87e6a-cbc8-494c-989a-1465b0e31908-kube-api-access-95k24\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.337366 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-log-httpd\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.337640 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-run-httpd\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.343446 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.343614 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.343742 
4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-scripts\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.344656 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-config-data\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.348165 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.357268 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95k24\" (UniqueName: \"kubernetes.io/projected/00e87e6a-cbc8-494c-989a-1465b0e31908-kube-api-access-95k24\") pod \"ceilometer-0\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.438452 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d483395-0697-412a-8072-897e32b7d492" path="/var/lib/kubelet/pods/1d483395-0697-412a-8072-897e32b7d492/volumes" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.490997 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:34 crc kubenswrapper[4922]: I0929 22:48:34.981290 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:34 crc kubenswrapper[4922]: W0929 22:48:34.989184 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00e87e6a_cbc8_494c_989a_1465b0e31908.slice/crio-6087909a9226695e19010d44f0458944cbf0a039b0e2827f53651f3342a26050 WatchSource:0}: Error finding container 6087909a9226695e19010d44f0458944cbf0a039b0e2827f53651f3342a26050: Status 404 returned error can't find the container with id 6087909a9226695e19010d44f0458944cbf0a039b0e2827f53651f3342a26050 Sep 29 22:48:35 crc kubenswrapper[4922]: I0929 22:48:35.047500 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerStarted","Data":"6087909a9226695e19010d44f0458944cbf0a039b0e2827f53651f3342a26050"} Sep 29 22:48:36 crc kubenswrapper[4922]: I0929 22:48:36.062626 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerStarted","Data":"8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70"} Sep 29 22:48:36 crc kubenswrapper[4922]: I0929 22:48:36.343282 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 22:48:37 crc kubenswrapper[4922]: I0929 22:48:37.079466 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerStarted","Data":"858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418"} Sep 29 22:48:38 crc kubenswrapper[4922]: I0929 
22:48:38.137799 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerStarted","Data":"7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a"} Sep 29 22:48:39 crc kubenswrapper[4922]: I0929 22:48:39.153225 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerStarted","Data":"566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445"} Sep 29 22:48:39 crc kubenswrapper[4922]: I0929 22:48:39.153742 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:48:39 crc kubenswrapper[4922]: I0929 22:48:39.180948 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.30376377 podStartE2EDuration="5.180930869s" podCreationTimestamp="2025-09-29 22:48:34 +0000 UTC" firstStartedPulling="2025-09-29 22:48:34.99192913 +0000 UTC m=+1319.302217953" lastFinishedPulling="2025-09-29 22:48:38.869096189 +0000 UTC m=+1323.179385052" observedRunningTime="2025-09-29 22:48:39.178100749 +0000 UTC m=+1323.488389602" watchObservedRunningTime="2025-09-29 22:48:39.180930869 +0000 UTC m=+1323.491219672" Sep 29 22:48:40 crc kubenswrapper[4922]: I0929 22:48:40.353312 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 22:48:41 crc kubenswrapper[4922]: I0929 22:48:41.343639 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 22:48:41 crc kubenswrapper[4922]: I0929 22:48:41.372158 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 22:48:42 crc kubenswrapper[4922]: I0929 22:48:42.225759 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 22:48:42 crc kubenswrapper[4922]: I0929 22:48:42.411748 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 22:48:42 crc kubenswrapper[4922]: I0929 22:48:42.411803 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 22:48:43 crc kubenswrapper[4922]: I0929 22:48:43.452746 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 22:48:43 crc kubenswrapper[4922]: I0929 22:48:43.493703 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 22:48:49 crc kubenswrapper[4922]: E0929 22:48:49.092116 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda032eb9f_381d_4249_b7ca_2e627f961119.slice/crio-conmon-48508a82337e926742cebbd69b7faec4f8934755f4c3a2de0b0d1596050a5f5a.scope\": RecentStats: unable to find data in memory cache]" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.271366 4922 
generic.go:334] "Generic (PLEG): container finished" podID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerID="0905da41990a3b6e384c72d2a0386ef4d77931396bc0710a2e410cb7138434b4" exitCode=137 Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.271451 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"094ff0cc-5eda-4a89-96a7-e8067418c9e0","Type":"ContainerDied","Data":"0905da41990a3b6e384c72d2a0386ef4d77931396bc0710a2e410cb7138434b4"} Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.271518 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"094ff0cc-5eda-4a89-96a7-e8067418c9e0","Type":"ContainerDied","Data":"bb8c1bcaed35fd97a2f78381614012647bab59bac7012366e9a655df98a7bd90"} Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.271538 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb8c1bcaed35fd97a2f78381614012647bab59bac7012366e9a655df98a7bd90" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.273169 4922 generic.go:334] "Generic (PLEG): container finished" podID="a032eb9f-381d-4249-b7ca-2e627f961119" containerID="48508a82337e926742cebbd69b7faec4f8934755f4c3a2de0b0d1596050a5f5a" exitCode=137 Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.273224 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a032eb9f-381d-4249-b7ca-2e627f961119","Type":"ContainerDied","Data":"48508a82337e926742cebbd69b7faec4f8934755f4c3a2de0b0d1596050a5f5a"} Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.273256 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a032eb9f-381d-4249-b7ca-2e627f961119","Type":"ContainerDied","Data":"fe92aa566b934d5aa76d822ed36da2c794fd636e9b5e64e499a531a2f67b0e76"} Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.273274 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe92aa566b934d5aa76d822ed36da2c794fd636e9b5e64e499a531a2f67b0e76" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.369545 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.376846 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.465028 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-combined-ca-bundle\") pod \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.465185 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s96pr\" (UniqueName: \"kubernetes.io/projected/094ff0cc-5eda-4a89-96a7-e8067418c9e0-kube-api-access-s96pr\") pod \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.465225 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-config-data\") pod \"a032eb9f-381d-4249-b7ca-2e627f961119\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.465353 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-combined-ca-bundle\") pod \"a032eb9f-381d-4249-b7ca-2e627f961119\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.465485 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094ff0cc-5eda-4a89-96a7-e8067418c9e0-logs\") pod \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.465592 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-727rs\" (UniqueName: \"kubernetes.io/projected/a032eb9f-381d-4249-b7ca-2e627f961119-kube-api-access-727rs\") pod \"a032eb9f-381d-4249-b7ca-2e627f961119\" (UID: \"a032eb9f-381d-4249-b7ca-2e627f961119\") " Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.465625 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-config-data\") pod \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\" (UID: \"094ff0cc-5eda-4a89-96a7-e8067418c9e0\") " Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.466527 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/094ff0cc-5eda-4a89-96a7-e8067418c9e0-logs" (OuterVolumeSpecName: "logs") pod "094ff0cc-5eda-4a89-96a7-e8067418c9e0" (UID: "094ff0cc-5eda-4a89-96a7-e8067418c9e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.471230 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a032eb9f-381d-4249-b7ca-2e627f961119-kube-api-access-727rs" (OuterVolumeSpecName: "kube-api-access-727rs") pod "a032eb9f-381d-4249-b7ca-2e627f961119" (UID: "a032eb9f-381d-4249-b7ca-2e627f961119"). InnerVolumeSpecName "kube-api-access-727rs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.472166 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/094ff0cc-5eda-4a89-96a7-e8067418c9e0-kube-api-access-s96pr" (OuterVolumeSpecName: "kube-api-access-s96pr") pod "094ff0cc-5eda-4a89-96a7-e8067418c9e0" (UID: "094ff0cc-5eda-4a89-96a7-e8067418c9e0"). InnerVolumeSpecName "kube-api-access-s96pr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.491718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a032eb9f-381d-4249-b7ca-2e627f961119" (UID: "a032eb9f-381d-4249-b7ca-2e627f961119"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.501779 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-config-data" (OuterVolumeSpecName: "config-data") pod "094ff0cc-5eda-4a89-96a7-e8067418c9e0" (UID: "094ff0cc-5eda-4a89-96a7-e8067418c9e0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.511538 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-config-data" (OuterVolumeSpecName: "config-data") pod "a032eb9f-381d-4249-b7ca-2e627f961119" (UID: "a032eb9f-381d-4249-b7ca-2e627f961119"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.513532 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "094ff0cc-5eda-4a89-96a7-e8067418c9e0" (UID: "094ff0cc-5eda-4a89-96a7-e8067418c9e0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.568628 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/094ff0cc-5eda-4a89-96a7-e8067418c9e0-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.568678 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-727rs\" (UniqueName: \"kubernetes.io/projected/a032eb9f-381d-4249-b7ca-2e627f961119-kube-api-access-727rs\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.568698 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.568715 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/094ff0cc-5eda-4a89-96a7-e8067418c9e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.568736 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s96pr\" (UniqueName: \"kubernetes.io/projected/094ff0cc-5eda-4a89-96a7-e8067418c9e0-kube-api-access-s96pr\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.568753 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:49 crc kubenswrapper[4922]: I0929 22:48:49.568772 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032eb9f-381d-4249-b7ca-2e627f961119-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.283291 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.283833 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.333247 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.351008 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.363449 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.373064 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.403884 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: E0929 22:48:50.407625 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-metadata" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.407660 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-metadata" Sep 29 22:48:50 crc kubenswrapper[4922]: E0929 22:48:50.407705 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a032eb9f-381d-4249-b7ca-2e627f961119" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.407714 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a032eb9f-381d-4249-b7ca-2e627f961119" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 22:48:50 crc kubenswrapper[4922]: E0929 22:48:50.407749 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-log" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.407760 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-log" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.408953 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a032eb9f-381d-4249-b7ca-2e627f961119" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.408986 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-metadata" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.409006 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" containerName="nova-metadata-log" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.417594 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.426428 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.426733 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.429905 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.440487 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="094ff0cc-5eda-4a89-96a7-e8067418c9e0" path="/var/lib/kubelet/pods/094ff0cc-5eda-4a89-96a7-e8067418c9e0/volumes" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.441728 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a032eb9f-381d-4249-b7ca-2e627f961119" path="/var/lib/kubelet/pods/a032eb9f-381d-4249-b7ca-2e627f961119/volumes" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.454364 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.462745 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.464693 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.467548 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.468485 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.469322 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490319 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmsrv\" (UniqueName: \"kubernetes.io/projected/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-kube-api-access-xmsrv\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490367 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490456 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490493 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490524 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490560 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490615 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490655 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-config-data\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490684 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-logs\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.490739 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5fbh\" (UniqueName: \"kubernetes.io/projected/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-kube-api-access-z5fbh\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.592170 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5fbh\" (UniqueName: \"kubernetes.io/projected/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-kube-api-access-z5fbh\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.593058 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.593248 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmsrv\" (UniqueName: \"kubernetes.io/projected/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-kube-api-access-xmsrv\") 
pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.593560 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.593783 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.594045 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.594719 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.595028 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.595236 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-config-data\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.595385 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-logs\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.596105 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-logs\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.598102 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.600632 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.602274 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.603015 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.603052 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.604110 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-config-data\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.604106 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.614977 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5fbh\" (UniqueName: \"kubernetes.io/projected/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-kube-api-access-z5fbh\") pod \"nova-metadata-0\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " pod="openstack/nova-metadata-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.617464 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmsrv\" (UniqueName: \"kubernetes.io/projected/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-kube-api-access-xmsrv\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.746284 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:50 crc kubenswrapper[4922]: I0929 22:48:50.778784 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:48:51 crc kubenswrapper[4922]: I0929 22:48:51.081097 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:48:51 crc kubenswrapper[4922]: I0929 22:48:51.144937 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:48:51 crc kubenswrapper[4922]: I0929 22:48:51.296672 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"215d795e-73e0-44f4-9c4a-3eb67bbe9b08","Type":"ContainerStarted","Data":"6d7d4f2c4cababaa9fea27a59eb2eae5c7dd4d8644745594a9588ceecd84b6f3"} Sep 29 22:48:51 crc kubenswrapper[4922]: I0929 22:48:51.298292 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dfe7291a-aae6-4a8f-9f46-fa4594582dfe","Type":"ContainerStarted","Data":"4762323440fe763bdffe524f4e1afdc05e73022cd4d74ec2d49db1ff65a0e71c"} Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.314775 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"215d795e-73e0-44f4-9c4a-3eb67bbe9b08","Type":"ContainerStarted","Data":"e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884"} Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.315226 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"215d795e-73e0-44f4-9c4a-3eb67bbe9b08","Type":"ContainerStarted","Data":"cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a"} Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.319751 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dfe7291a-aae6-4a8f-9f46-fa4594582dfe","Type":"ContainerStarted","Data":"be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd"} Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.363019 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.362983137 podStartE2EDuration="2.362983137s" podCreationTimestamp="2025-09-29 22:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:52.350973351 +0000 UTC m=+1336.661262204" watchObservedRunningTime="2025-09-29 22:48:52.362983137 +0000 UTC m=+1336.673271990" Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.384085 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.384070228 podStartE2EDuration="2.384070228s" podCreationTimestamp="2025-09-29 22:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:52.382662793 +0000 UTC m=+1336.692951636" watchObservedRunningTime="2025-09-29 22:48:52.384070228 +0000 UTC m=+1336.694359041" Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.417353 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.418282 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 22:48:52.419271 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 22:48:52 crc kubenswrapper[4922]: I0929 
22:48:52.448985 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.331240 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.335996 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.585125 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kl2pv"] Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.586710 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.600070 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kl2pv"] Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.665865 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.665945 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47dtk\" (UniqueName: \"kubernetes.io/projected/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-kube-api-access-47dtk\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.666037 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.666057 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.666186 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.666217 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.767369 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-47dtk\" (UniqueName: \"kubernetes.io/projected/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-kube-api-access-47dtk\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.767505 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.767533 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.767627 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.767646 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.767694 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.768573 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.768650 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.769153 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.769343 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.769825 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.785377 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47dtk\" (UniqueName: \"kubernetes.io/projected/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-kube-api-access-47dtk\") pod \"dnsmasq-dns-5c7b6c5df9-kl2pv\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:53 crc kubenswrapper[4922]: I0929 22:48:53.915939 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:54 crc kubenswrapper[4922]: I0929 22:48:54.375189 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kl2pv"] Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.322562 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.323184 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-central-agent" containerID="cri-o://8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70" gracePeriod=30 Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.324809 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="proxy-httpd" containerID="cri-o://566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445" gracePeriod=30 Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.324852 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-notification-agent" containerID="cri-o://858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418" gracePeriod=30 Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.324919 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="sg-core" containerID="cri-o://7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a" gracePeriod=30 Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.341858 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.192:3000/\": EOF" Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.359027 4922 generic.go:334] "Generic (PLEG): container finished" podID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerID="9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6" exitCode=0 Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.359240 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" event={"ID":"a41c5e41-6db6-44dc-989d-d7a8ed8ae091","Type":"ContainerDied","Data":"9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6"} Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.359290 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" event={"ID":"a41c5e41-6db6-44dc-989d-d7a8ed8ae091","Type":"ContainerStarted","Data":"f26577b05af0203a7b02b4066c4acd21a533001ed1bbdc676fb82713899454f9"} Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.746793 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.780353 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 22:48:55 crc kubenswrapper[4922]: I0929 22:48:55.780420 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.133332 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.371442 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" event={"ID":"a41c5e41-6db6-44dc-989d-d7a8ed8ae091","Type":"ContainerStarted","Data":"bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c"} Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.371735 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.379779 4922 generic.go:334] "Generic (PLEG): container finished" podID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerID="566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445" exitCode=0 Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.379806 4922 generic.go:334] "Generic (PLEG): container finished" podID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerID="7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a" exitCode=2 Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.379813 4922 generic.go:334] "Generic (PLEG): container finished" podID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerID="8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70" exitCode=0 Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.379961 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-log" containerID="cri-o://60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b" gracePeriod=30 Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.380182 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerDied","Data":"566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445"} Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.380205 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerDied","Data":"7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a"} Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.380216 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerDied","Data":"8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70"} Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.380262 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-api" containerID="cri-o://ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c" gracePeriod=30 Sep 29 22:48:56 crc kubenswrapper[4922]: I0929 22:48:56.400941 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" podStartSLOduration=3.400926997 podStartE2EDuration="3.400926997s" podCreationTimestamp="2025-09-29 22:48:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:48:56.396677332 +0000 UTC m=+1340.706966145" watchObservedRunningTime="2025-09-29 22:48:56.400926997 +0000 UTC m=+1340.711215810" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.174860 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.233366 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-scripts\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.233474 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-config-data\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.233533 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-ceilometer-tls-certs\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.233569 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-combined-ca-bundle\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.233605 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-sg-core-conf-yaml\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.233627 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-log-httpd\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.234294 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-run-httpd\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.234360 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95k24\" (UniqueName: \"kubernetes.io/projected/00e87e6a-cbc8-494c-989a-1465b0e31908-kube-api-access-95k24\") pod \"00e87e6a-cbc8-494c-989a-1465b0e31908\" (UID: \"00e87e6a-cbc8-494c-989a-1465b0e31908\") " Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.234557 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.234672 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.235202 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.235218 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/00e87e6a-cbc8-494c-989a-1465b0e31908-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.242580 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00e87e6a-cbc8-494c-989a-1465b0e31908-kube-api-access-95k24" (OuterVolumeSpecName: "kube-api-access-95k24") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "kube-api-access-95k24". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.248172 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-scripts" (OuterVolumeSpecName: "scripts") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.292305 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.309568 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.322209 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.336912 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95k24\" (UniqueName: \"kubernetes.io/projected/00e87e6a-cbc8-494c-989a-1465b0e31908-kube-api-access-95k24\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.336947 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.336961 4922 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.336974 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.336986 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.341873 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-config-data" (OuterVolumeSpecName: "config-data") pod "00e87e6a-cbc8-494c-989a-1465b0e31908" (UID: "00e87e6a-cbc8-494c-989a-1465b0e31908"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.393224 4922 generic.go:334] "Generic (PLEG): container finished" podID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerID="858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418" exitCode=0 Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.393289 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerDied","Data":"858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418"} Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.393315 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"00e87e6a-cbc8-494c-989a-1465b0e31908","Type":"ContainerDied","Data":"6087909a9226695e19010d44f0458944cbf0a039b0e2827f53651f3342a26050"} Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.393331 4922 scope.go:117] "RemoveContainer" containerID="566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.393358 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.397766 4922 generic.go:334] "Generic (PLEG): container finished" podID="2db43b28-b025-4538-ab26-cda934938671" containerID="60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b" exitCode=143 Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.398585 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db43b28-b025-4538-ab26-cda934938671","Type":"ContainerDied","Data":"60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b"} Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.423212 4922 scope.go:117] "RemoveContainer" containerID="7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.436509 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.438987 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00e87e6a-cbc8-494c-989a-1465b0e31908-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.443081 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.450053 4922 scope.go:117] "RemoveContainer" containerID="858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.469501 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:57 crc kubenswrapper[4922]: E0929 22:48:57.469980 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="proxy-httpd" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470003 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="proxy-httpd" Sep 29 22:48:57 crc kubenswrapper[4922]: E0929 22:48:57.470039 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-notification-agent" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470049 4922 
state_mem.go:107] "Deleted CPUSet assignment" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-notification-agent" Sep 29 22:48:57 crc kubenswrapper[4922]: E0929 22:48:57.470071 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-central-agent" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470080 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-central-agent" Sep 29 22:48:57 crc kubenswrapper[4922]: E0929 22:48:57.470103 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="sg-core" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470111 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="sg-core" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470364 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-notification-agent" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470433 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="sg-core" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470467 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="ceilometer-central-agent" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.470485 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" containerName="proxy-httpd" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.472690 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.475633 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.475709 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.475822 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.485743 4922 scope.go:117] "RemoveContainer" containerID="8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.495286 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.510712 4922 scope.go:117] "RemoveContainer" containerID="566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445" Sep 29 22:48:57 crc kubenswrapper[4922]: E0929 22:48:57.511134 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445\": container with ID starting with 566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445 not found: ID does not exist" containerID="566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.511189 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445"} err="failed to get container status \"566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445\": rpc error: code = NotFound desc = could not find container \"566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445\": container with ID starting with 566ccf38e79b44f3dc99e1af00a47b6d81a13f68563930a5f92a50a94e92b445 not found: ID does not exist" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.511214 4922 scope.go:117] "RemoveContainer" containerID="7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a" Sep 29 22:48:57 crc kubenswrapper[4922]: E0929 22:48:57.511750 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a\": container with ID starting with 7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a not found: ID does not exist" containerID="7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.511780 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a"} err="failed to get container status \"7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a\": rpc error: code = NotFound desc = could not find container \"7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a\": container with ID starting with 7513a3610b0600c1297b0b5d20192a8fd9ea3f53377001b34c330dc8740f1a0a not found: ID does not exist" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.511806 4922 scope.go:117] "RemoveContainer" containerID="858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418" Sep 29 22:48:57 
crc kubenswrapper[4922]: E0929 22:48:57.512119 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418\": container with ID starting with 858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418 not found: ID does not exist" containerID="858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.512145 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418"} err="failed to get container status \"858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418\": rpc error: code = NotFound desc = could not find container \"858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418\": container with ID starting with 858d89f011a66e30b758e507936371df95539a77a0619ac291ac926d3a349418 not found: ID does not exist" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.512160 4922 scope.go:117] "RemoveContainer" containerID="8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70" Sep 29 22:48:57 crc kubenswrapper[4922]: E0929 22:48:57.512349 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70\": container with ID starting with 8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70 not found: ID does not exist" containerID="8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.512369 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70"} err="failed to get container status \"8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70\": rpc error: code = NotFound desc = could not find container \"8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70\": container with ID starting with 8cbc80a365e077ea875567f0ef14761ee1608a12d33d5a98cbebd4530c685f70 not found: ID does not exist" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.539967 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.540016 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-config-data\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.540050 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-run-httpd\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.540088 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-log-httpd\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.540135 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqg8k\" (UniqueName: \"kubernetes.io/projected/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-kube-api-access-zqg8k\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.540153 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.540169 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-scripts\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.540194 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.642130 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqg8k\" (UniqueName: \"kubernetes.io/projected/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-kube-api-access-zqg8k\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.642186 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.642216 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-scripts\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.642254 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.642308 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc 
kubenswrapper[4922]: I0929 22:48:57.642353 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-config-data\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.642548 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-run-httpd\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.642689 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-log-httpd\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.643218 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-log-httpd\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.643176 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-run-httpd\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.646847 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-config-data\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.647310 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.648889 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.649426 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-scripts\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.652487 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.663747 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zqg8k\" (UniqueName: \"kubernetes.io/projected/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-kube-api-access-zqg8k\") pod \"ceilometer-0\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " pod="openstack/ceilometer-0" Sep 29 22:48:57 crc kubenswrapper[4922]: I0929 22:48:57.799965 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:48:58 crc kubenswrapper[4922]: I0929 22:48:58.283778 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:48:58 crc kubenswrapper[4922]: W0929 22:48:58.287269 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0617fd51_e6f8_4cb7_8b63_cadf8ddaf031.slice/crio-5f0ee15ea3a4662f650cdb9075066dfcfb6cc301c07c2ae3eac8ec7f566de08a WatchSource:0}: Error finding container 5f0ee15ea3a4662f650cdb9075066dfcfb6cc301c07c2ae3eac8ec7f566de08a: Status 404 returned error can't find the container with id 5f0ee15ea3a4662f650cdb9075066dfcfb6cc301c07c2ae3eac8ec7f566de08a Sep 29 22:48:58 crc kubenswrapper[4922]: I0929 22:48:58.413739 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerStarted","Data":"5f0ee15ea3a4662f650cdb9075066dfcfb6cc301c07c2ae3eac8ec7f566de08a"} Sep 29 22:48:58 crc kubenswrapper[4922]: I0929 22:48:58.446429 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00e87e6a-cbc8-494c-989a-1465b0e31908" path="/var/lib/kubelet/pods/00e87e6a-cbc8-494c-989a-1465b0e31908/volumes" Sep 29 22:48:59 crc kubenswrapper[4922]: I0929 22:48:59.429176 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerStarted","Data":"13fd77bc452c12f556dd75f45bf37781a1403c31edf646ab5a140812ff690364"} Sep 29 22:48:59 crc kubenswrapper[4922]: I0929 22:48:59.950158 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:48:59 crc kubenswrapper[4922]: I0929 22:48:59.996661 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-config-data\") pod \"2db43b28-b025-4538-ab26-cda934938671\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " Sep 29 22:48:59 crc kubenswrapper[4922]: I0929 22:48:59.996814 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb78l\" (UniqueName: \"kubernetes.io/projected/2db43b28-b025-4538-ab26-cda934938671-kube-api-access-cb78l\") pod \"2db43b28-b025-4538-ab26-cda934938671\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " Sep 29 22:48:59 crc kubenswrapper[4922]: I0929 22:48:59.996931 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db43b28-b025-4538-ab26-cda934938671-logs\") pod \"2db43b28-b025-4538-ab26-cda934938671\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " Sep 29 22:48:59 crc kubenswrapper[4922]: I0929 22:48:59.996965 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-combined-ca-bundle\") pod \"2db43b28-b025-4538-ab26-cda934938671\" (UID: \"2db43b28-b025-4538-ab26-cda934938671\") " Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.000735 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2db43b28-b025-4538-ab26-cda934938671-logs" (OuterVolumeSpecName: "logs") pod "2db43b28-b025-4538-ab26-cda934938671" (UID: "2db43b28-b025-4538-ab26-cda934938671"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.002672 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db43b28-b025-4538-ab26-cda934938671-kube-api-access-cb78l" (OuterVolumeSpecName: "kube-api-access-cb78l") pod "2db43b28-b025-4538-ab26-cda934938671" (UID: "2db43b28-b025-4538-ab26-cda934938671"). InnerVolumeSpecName "kube-api-access-cb78l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.029520 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2db43b28-b025-4538-ab26-cda934938671" (UID: "2db43b28-b025-4538-ab26-cda934938671"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.050313 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-config-data" (OuterVolumeSpecName: "config-data") pod "2db43b28-b025-4538-ab26-cda934938671" (UID: "2db43b28-b025-4538-ab26-cda934938671"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.098137 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb78l\" (UniqueName: \"kubernetes.io/projected/2db43b28-b025-4538-ab26-cda934938671-kube-api-access-cb78l\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.098170 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2db43b28-b025-4538-ab26-cda934938671-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.098180 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.098188 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db43b28-b025-4538-ab26-cda934938671-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.438713 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerStarted","Data":"ce05b6489484a12ed071a747de16f8ec141d68489e436390d82db8647d849b69"} Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.439207 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerStarted","Data":"bcdb4f9a717ab87bcdb1ad12cb92702bad850a19ba4db942f610381ad13eb2b3"} Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.440679 4922 generic.go:334] "Generic (PLEG): container finished" podID="2db43b28-b025-4538-ab26-cda934938671" containerID="ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c" exitCode=0 Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.440723 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.440733 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db43b28-b025-4538-ab26-cda934938671","Type":"ContainerDied","Data":"ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c"} Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.440764 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2db43b28-b025-4538-ab26-cda934938671","Type":"ContainerDied","Data":"b108d88e8ec3802913563bfded99814b76421e56d2b9ba7a460026c5b7e47d3d"} Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.440781 4922 scope.go:117] "RemoveContainer" containerID="ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.475450 4922 scope.go:117] "RemoveContainer" containerID="60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.521290 4922 scope.go:117] "RemoveContainer" containerID="ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c" Sep 29 22:49:00 crc kubenswrapper[4922]: E0929 22:49:00.521802 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c\": container with ID starting with ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c not found: ID does not exist" containerID="ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.521849 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c"} err="failed to get container status \"ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c\": rpc error: code = NotFound desc = could not find container \"ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c\": container with ID starting with ddadd397eddd0f62aaf603fb4bf0c13dc0947adf7bcf237036f182afb581dd3c not found: ID does not exist" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.521875 4922 scope.go:117] "RemoveContainer" containerID="60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b" Sep 29 22:49:00 crc kubenswrapper[4922]: E0929 22:49:00.522231 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b\": container with ID starting with 60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b not found: ID does not exist" containerID="60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.522270 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b"} err="failed to get container status \"60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b\": rpc error: code = NotFound desc = could not find container \"60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b\": container with ID starting with 60e8a966bacfc373ec6ee368e5ea940aa2b91e0f88ca8cf6d1a19be95cccdd1b not found: ID does not exist" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.747289 4922 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.780016 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.780338 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 22:49:00 crc kubenswrapper[4922]: I0929 22:49:00.806321 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.468704 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.639484 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-n4p4t"] Sep 29 22:49:01 crc kubenswrapper[4922]: E0929 22:49:01.640311 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-log" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.640333 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-log" Sep 29 22:49:01 crc kubenswrapper[4922]: E0929 22:49:01.640386 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-api" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.640421 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-api" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.640654 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-api" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.640699 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db43b28-b025-4538-ab26-cda934938671" containerName="nova-api-log" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.641545 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.645452 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.645683 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.650748 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-n4p4t"] Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.729298 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5qv2\" (UniqueName: \"kubernetes.io/projected/e4df9702-b583-4c89-8412-c99de320208c-kube-api-access-k5qv2\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.729402 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-scripts\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.729483 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.729572 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-config-data\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.792496 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.792515 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.194:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.830553 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5qv2\" (UniqueName: \"kubernetes.io/projected/e4df9702-b583-4c89-8412-c99de320208c-kube-api-access-k5qv2\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.830599 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-scripts\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.830663 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.831444 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-config-data\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.835081 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-scripts\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.835416 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-config-data\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.836919 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:01 crc kubenswrapper[4922]: I0929 22:49:01.847279 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5qv2\" (UniqueName: \"kubernetes.io/projected/e4df9702-b583-4c89-8412-c99de320208c-kube-api-access-k5qv2\") pod \"nova-cell1-cell-mapping-n4p4t\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:02 crc kubenswrapper[4922]: I0929 22:49:02.007663 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:02 crc kubenswrapper[4922]: I0929 22:49:02.465309 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerStarted","Data":"cae850c08219cac4ce375335d50b6de2b3c7fb4df5328a2868d0238ce2244141"} Sep 29 22:49:02 crc kubenswrapper[4922]: I0929 22:49:02.466377 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 22:49:02 crc kubenswrapper[4922]: I0929 22:49:02.483381 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-n4p4t"] Sep 29 22:49:02 crc kubenswrapper[4922]: I0929 22:49:02.499046 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.313271958 podStartE2EDuration="5.499026372s" podCreationTimestamp="2025-09-29 22:48:57 +0000 UTC" firstStartedPulling="2025-09-29 22:48:58.289930186 +0000 UTC m=+1342.600218999" lastFinishedPulling="2025-09-29 22:49:01.47568459 +0000 UTC m=+1345.785973413" observedRunningTime="2025-09-29 22:49:02.491681831 +0000 UTC m=+1346.801970644" watchObservedRunningTime="2025-09-29 22:49:02.499026372 +0000 UTC m=+1346.809315185" Sep 29 22:49:03 crc kubenswrapper[4922]: I0929 22:49:03.474875 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n4p4t" event={"ID":"e4df9702-b583-4c89-8412-c99de320208c","Type":"ContainerStarted","Data":"f77d0a3f2b3eb711b2f8c5cdc56a0b17663397edcfbabb2c2ea7245deb82d352"} Sep 29 22:49:03 crc kubenswrapper[4922]: I0929 22:49:03.475128 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n4p4t" event={"ID":"e4df9702-b583-4c89-8412-c99de320208c","Type":"ContainerStarted","Data":"fd56f20d2a910e4f756fb97fae511b0e1073cfc4dc2e01e9958091c9da8e33e4"} Sep 29 22:49:03 crc kubenswrapper[4922]: I0929 22:49:03.503767 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-n4p4t" podStartSLOduration=2.503743355 podStartE2EDuration="2.503743355s" podCreationTimestamp="2025-09-29 22:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:49:03.49342073 +0000 UTC m=+1347.803709553" watchObservedRunningTime="2025-09-29 22:49:03.503743355 +0000 UTC m=+1347.814032208" Sep 29 22:49:03 crc kubenswrapper[4922]: I0929 22:49:03.917798 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:49:03 crc kubenswrapper[4922]: I0929 22:49:03.978596 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-mqchm"] Sep 29 22:49:03 crc kubenswrapper[4922]: I0929 22:49:03.978969 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" podUID="5f316176-d569-40e2-a666-06d83e6bb959" containerName="dnsmasq-dns" containerID="cri-o://3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1" gracePeriod=10 Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.471610 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.487667 4922 generic.go:334] "Generic (PLEG): container finished" podID="5f316176-d569-40e2-a666-06d83e6bb959" containerID="3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1" exitCode=0 Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.487706 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.487755 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" event={"ID":"5f316176-d569-40e2-a666-06d83e6bb959","Type":"ContainerDied","Data":"3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1"} Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.487809 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-mqchm" event={"ID":"5f316176-d569-40e2-a666-06d83e6bb959","Type":"ContainerDied","Data":"431509b486c53b1854dcfa581e39427e3b7603f91a1576c78f71218d8846586a"} Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.487832 4922 scope.go:117] "RemoveContainer" containerID="3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.497012 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-swift-storage-0\") pod \"5f316176-d569-40e2-a666-06d83e6bb959\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.497101 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-config\") pod \"5f316176-d569-40e2-a666-06d83e6bb959\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.497145 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-sb\") pod \"5f316176-d569-40e2-a666-06d83e6bb959\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.497210 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-nb\") pod \"5f316176-d569-40e2-a666-06d83e6bb959\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.497291 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbv5b\" (UniqueName: \"kubernetes.io/projected/5f316176-d569-40e2-a666-06d83e6bb959-kube-api-access-cbv5b\") pod \"5f316176-d569-40e2-a666-06d83e6bb959\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.497341 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-svc\") pod \"5f316176-d569-40e2-a666-06d83e6bb959\" (UID: \"5f316176-d569-40e2-a666-06d83e6bb959\") " Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.526153 4922 scope.go:117] "RemoveContainer" 
containerID="7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.538615 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f316176-d569-40e2-a666-06d83e6bb959-kube-api-access-cbv5b" (OuterVolumeSpecName: "kube-api-access-cbv5b") pod "5f316176-d569-40e2-a666-06d83e6bb959" (UID: "5f316176-d569-40e2-a666-06d83e6bb959"). InnerVolumeSpecName "kube-api-access-cbv5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.571774 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5f316176-d569-40e2-a666-06d83e6bb959" (UID: "5f316176-d569-40e2-a666-06d83e6bb959"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.580549 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5f316176-d569-40e2-a666-06d83e6bb959" (UID: "5f316176-d569-40e2-a666-06d83e6bb959"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.585378 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5f316176-d569-40e2-a666-06d83e6bb959" (UID: "5f316176-d569-40e2-a666-06d83e6bb959"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.599336 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbv5b\" (UniqueName: \"kubernetes.io/projected/5f316176-d569-40e2-a666-06d83e6bb959-kube-api-access-cbv5b\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.599358 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.599368 4922 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.599376 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.613990 4922 scope.go:117] "RemoveContainer" containerID="3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1" Sep 29 22:49:04 crc kubenswrapper[4922]: E0929 22:49:04.614660 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1\": container with ID starting with 3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1 not found: ID does not exist" containerID="3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.615055 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1"} err="failed to get container status \"3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1\": rpc error: code = NotFound desc = could not find container \"3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1\": container with ID starting with 3deb913c92acabb6e1663c77efb958b157d62db75d030da0d3aa785b7dc7c7e1 not found: ID does not exist" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.615088 4922 scope.go:117] "RemoveContainer" containerID="7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e" Sep 29 22:49:04 crc kubenswrapper[4922]: E0929 22:49:04.615601 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e\": container with ID starting with 7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e not found: ID does not exist" containerID="7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.615633 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e"} err="failed to get container status \"7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e\": rpc error: code = NotFound desc = could not find container \"7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e\": container with ID starting with 
7d9c09debe84cb4a40d7f5444ae84ca99254a8baa3dce0f489bc24e4bbcb615e not found: ID does not exist" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.619622 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-config" (OuterVolumeSpecName: "config") pod "5f316176-d569-40e2-a666-06d83e6bb959" (UID: "5f316176-d569-40e2-a666-06d83e6bb959"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.628307 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5f316176-d569-40e2-a666-06d83e6bb959" (UID: "5f316176-d569-40e2-a666-06d83e6bb959"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.701390 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.701428 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f316176-d569-40e2-a666-06d83e6bb959-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.816761 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-mqchm"] Sep 29 22:49:04 crc kubenswrapper[4922]: I0929 22:49:04.823211 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-mqchm"] Sep 29 22:49:06 crc kubenswrapper[4922]: I0929 22:49:06.461488 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f316176-d569-40e2-a666-06d83e6bb959" path="/var/lib/kubelet/pods/5f316176-d569-40e2-a666-06d83e6bb959/volumes" Sep 29 22:49:07 crc kubenswrapper[4922]: I0929 22:49:07.524265 4922 generic.go:334] "Generic (PLEG): container finished" podID="e4df9702-b583-4c89-8412-c99de320208c" containerID="f77d0a3f2b3eb711b2f8c5cdc56a0b17663397edcfbabb2c2ea7245deb82d352" exitCode=0 Sep 29 22:49:07 crc kubenswrapper[4922]: I0929 22:49:07.524321 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n4p4t" event={"ID":"e4df9702-b583-4c89-8412-c99de320208c","Type":"ContainerDied","Data":"f77d0a3f2b3eb711b2f8c5cdc56a0b17663397edcfbabb2c2ea7245deb82d352"} Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.031470 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.198352 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-config-data\") pod \"e4df9702-b583-4c89-8412-c99de320208c\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.198764 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-scripts\") pod \"e4df9702-b583-4c89-8412-c99de320208c\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.198961 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5qv2\" (UniqueName: \"kubernetes.io/projected/e4df9702-b583-4c89-8412-c99de320208c-kube-api-access-k5qv2\") pod \"e4df9702-b583-4c89-8412-c99de320208c\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.199017 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-combined-ca-bundle\") pod \"e4df9702-b583-4c89-8412-c99de320208c\" (UID: \"e4df9702-b583-4c89-8412-c99de320208c\") " Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.207703 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4df9702-b583-4c89-8412-c99de320208c-kube-api-access-k5qv2" (OuterVolumeSpecName: "kube-api-access-k5qv2") pod "e4df9702-b583-4c89-8412-c99de320208c" (UID: "e4df9702-b583-4c89-8412-c99de320208c"). InnerVolumeSpecName "kube-api-access-k5qv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.207921 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-scripts" (OuterVolumeSpecName: "scripts") pod "e4df9702-b583-4c89-8412-c99de320208c" (UID: "e4df9702-b583-4c89-8412-c99de320208c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.249814 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-config-data" (OuterVolumeSpecName: "config-data") pod "e4df9702-b583-4c89-8412-c99de320208c" (UID: "e4df9702-b583-4c89-8412-c99de320208c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.252625 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4df9702-b583-4c89-8412-c99de320208c" (UID: "e4df9702-b583-4c89-8412-c99de320208c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.301899 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.301952 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.302013 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5qv2\" (UniqueName: \"kubernetes.io/projected/e4df9702-b583-4c89-8412-c99de320208c-kube-api-access-k5qv2\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.302037 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4df9702-b583-4c89-8412-c99de320208c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.552225 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n4p4t" event={"ID":"e4df9702-b583-4c89-8412-c99de320208c","Type":"ContainerDied","Data":"fd56f20d2a910e4f756fb97fae511b0e1073cfc4dc2e01e9958091c9da8e33e4"} Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.552321 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd56f20d2a910e4f756fb97fae511b0e1073cfc4dc2e01e9958091c9da8e33e4" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.552329 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n4p4t" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.737465 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.737715 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b6cf5218-da1f-4979-a8b2-1f4c49981307" containerName="nova-scheduler-scheduler" containerID="cri-o://d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" gracePeriod=30 Sep 29 22:49:09 crc kubenswrapper[4922]: E0929 22:49:09.751882 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4df9702_b583_4c89_8412_c99de320208c.slice\": RecentStats: unable to find data in memory cache]" Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.799676 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.800011 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-log" containerID="cri-o://cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a" gracePeriod=30 Sep 29 22:49:09 crc kubenswrapper[4922]: I0929 22:49:09.800132 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-metadata" containerID="cri-o://e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884" gracePeriod=30 Sep 
29 22:49:10 crc kubenswrapper[4922]: I0929 22:49:10.566767 4922 generic.go:334] "Generic (PLEG): container finished" podID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerID="cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a" exitCode=143 Sep 29 22:49:10 crc kubenswrapper[4922]: I0929 22:49:10.566848 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"215d795e-73e0-44f4-9c4a-3eb67bbe9b08","Type":"ContainerDied","Data":"cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a"} Sep 29 22:49:11 crc kubenswrapper[4922]: E0929 22:49:11.345755 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:49:11 crc kubenswrapper[4922]: E0929 22:49:11.347810 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:49:11 crc kubenswrapper[4922]: E0929 22:49:11.349997 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:49:11 crc kubenswrapper[4922]: E0929 22:49:11.350094 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="b6cf5218-da1f-4979-a8b2-1f4c49981307" containerName="nova-scheduler-scheduler" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.457068 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.591235 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-combined-ca-bundle\") pod \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.591274 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5fbh\" (UniqueName: \"kubernetes.io/projected/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-kube-api-access-z5fbh\") pod \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.591307 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-nova-metadata-tls-certs\") pod \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.591362 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-config-data\") pod \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.591546 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-logs\") pod \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\" (UID: \"215d795e-73e0-44f4-9c4a-3eb67bbe9b08\") " Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.595173 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-logs" (OuterVolumeSpecName: "logs") pod "215d795e-73e0-44f4-9c4a-3eb67bbe9b08" (UID: "215d795e-73e0-44f4-9c4a-3eb67bbe9b08"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.606644 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-kube-api-access-z5fbh" (OuterVolumeSpecName: "kube-api-access-z5fbh") pod "215d795e-73e0-44f4-9c4a-3eb67bbe9b08" (UID: "215d795e-73e0-44f4-9c4a-3eb67bbe9b08"). InnerVolumeSpecName "kube-api-access-z5fbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.622790 4922 generic.go:334] "Generic (PLEG): container finished" podID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerID="e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884" exitCode=0 Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.622837 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.622848 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"215d795e-73e0-44f4-9c4a-3eb67bbe9b08","Type":"ContainerDied","Data":"e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884"} Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.622887 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"215d795e-73e0-44f4-9c4a-3eb67bbe9b08","Type":"ContainerDied","Data":"6d7d4f2c4cababaa9fea27a59eb2eae5c7dd4d8644745594a9588ceecd84b6f3"} Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.622915 4922 scope.go:117] "RemoveContainer" containerID="e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.635476 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-config-data" (OuterVolumeSpecName: "config-data") pod "215d795e-73e0-44f4-9c4a-3eb67bbe9b08" (UID: "215d795e-73e0-44f4-9c4a-3eb67bbe9b08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.638723 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "215d795e-73e0-44f4-9c4a-3eb67bbe9b08" (UID: "215d795e-73e0-44f4-9c4a-3eb67bbe9b08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.662710 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "215d795e-73e0-44f4-9c4a-3eb67bbe9b08" (UID: "215d795e-73e0-44f4-9c4a-3eb67bbe9b08"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.686581 4922 scope.go:117] "RemoveContainer" containerID="cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.695384 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.695501 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.695576 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5fbh\" (UniqueName: \"kubernetes.io/projected/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-kube-api-access-z5fbh\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.695636 4922 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.695693 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215d795e-73e0-44f4-9c4a-3eb67bbe9b08-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.705610 4922 scope.go:117] "RemoveContainer" containerID="e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884" Sep 29 22:49:13 crc kubenswrapper[4922]: E0929 22:49:13.705990 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884\": container with ID starting with e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884 not found: ID does not exist" containerID="e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.706059 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884"} err="failed to get container status \"e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884\": rpc error: code = NotFound desc = could not find container \"e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884\": container with ID starting with e3a2a462dbbcbfc2b9011e1c1a4efc5804429bc8cfcf771ddbb3c210e240e884 not found: ID does not exist" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.706100 4922 scope.go:117] "RemoveContainer" containerID="cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a" Sep 29 22:49:13 crc kubenswrapper[4922]: E0929 22:49:13.706413 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a\": container with ID starting with cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a not found: ID does not exist" containerID="cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.706439 4922 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a"} err="failed to get container status \"cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a\": rpc error: code = NotFound desc = could not find container \"cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a\": container with ID starting with cab016c9f1e0807c33b00f0989d97b65abbe60fd1e82c347fc8e304ad66d7a0a not found: ID does not exist" Sep 29 22:49:13 crc kubenswrapper[4922]: I0929 22:49:13.997821 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.008288 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.018560 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:49:14 crc kubenswrapper[4922]: E0929 22:49:14.018977 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-log" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019002 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-log" Sep 29 22:49:14 crc kubenswrapper[4922]: E0929 22:49:14.019025 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f316176-d569-40e2-a666-06d83e6bb959" containerName="init" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019034 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f316176-d569-40e2-a666-06d83e6bb959" containerName="init" Sep 29 22:49:14 crc kubenswrapper[4922]: E0929 22:49:14.019062 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-metadata" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019071 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-metadata" Sep 29 22:49:14 crc kubenswrapper[4922]: E0929 22:49:14.019086 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f316176-d569-40e2-a666-06d83e6bb959" containerName="dnsmasq-dns" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019094 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f316176-d569-40e2-a666-06d83e6bb959" containerName="dnsmasq-dns" Sep 29 22:49:14 crc kubenswrapper[4922]: E0929 22:49:14.019108 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4df9702-b583-4c89-8412-c99de320208c" containerName="nova-manage" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019116 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4df9702-b583-4c89-8412-c99de320208c" containerName="nova-manage" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019354 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-metadata" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019379 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4df9702-b583-4c89-8412-c99de320208c" containerName="nova-manage" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.019422 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" containerName="nova-metadata-log" Sep 29 22:49:14 crc 
kubenswrapper[4922]: I0929 22:49:14.019446 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f316176-d569-40e2-a666-06d83e6bb959" containerName="dnsmasq-dns" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.020637 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.025449 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.025457 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.038572 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.103329 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-config-data\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.103379 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96spp\" (UniqueName: \"kubernetes.io/projected/8026992e-7dd1-42d9-b362-82febc75c072-kube-api-access-96spp\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.103423 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.103511 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.103565 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8026992e-7dd1-42d9-b362-82febc75c072-logs\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.205976 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.206142 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8026992e-7dd1-42d9-b362-82febc75c072-logs\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.206328 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-config-data\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.206426 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96spp\" (UniqueName: \"kubernetes.io/projected/8026992e-7dd1-42d9-b362-82febc75c072-kube-api-access-96spp\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.206480 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.207033 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8026992e-7dd1-42d9-b362-82febc75c072-logs\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.211904 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.211994 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-config-data\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.212820 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.237963 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96spp\" (UniqueName: \"kubernetes.io/projected/8026992e-7dd1-42d9-b362-82febc75c072-kube-api-access-96spp\") pod \"nova-metadata-0\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.408520 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.442071 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="215d795e-73e0-44f4-9c4a-3eb67bbe9b08" path="/var/lib/kubelet/pods/215d795e-73e0-44f4-9c4a-3eb67bbe9b08/volumes" Sep 29 22:49:14 crc kubenswrapper[4922]: I0929 22:49:14.727383 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:49:14 crc kubenswrapper[4922]: W0929 22:49:14.732358 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8026992e_7dd1_42d9_b362_82febc75c072.slice/crio-413b6a61bdcb727ee4cc28c78ea98a8e4a44d8830cab40b697aeb42829caa211 WatchSource:0}: Error finding container 413b6a61bdcb727ee4cc28c78ea98a8e4a44d8830cab40b697aeb42829caa211: Status 404 returned error can't find the container with id 413b6a61bdcb727ee4cc28c78ea98a8e4a44d8830cab40b697aeb42829caa211 Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.399106 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.531895 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-combined-ca-bundle\") pod \"b6cf5218-da1f-4979-a8b2-1f4c49981307\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.532079 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-config-data\") pod \"b6cf5218-da1f-4979-a8b2-1f4c49981307\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.532132 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkzff\" (UniqueName: \"kubernetes.io/projected/b6cf5218-da1f-4979-a8b2-1f4c49981307-kube-api-access-hkzff\") pod \"b6cf5218-da1f-4979-a8b2-1f4c49981307\" (UID: \"b6cf5218-da1f-4979-a8b2-1f4c49981307\") " Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.537378 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cf5218-da1f-4979-a8b2-1f4c49981307-kube-api-access-hkzff" (OuterVolumeSpecName: "kube-api-access-hkzff") pod "b6cf5218-da1f-4979-a8b2-1f4c49981307" (UID: "b6cf5218-da1f-4979-a8b2-1f4c49981307"). InnerVolumeSpecName "kube-api-access-hkzff". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.562029 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6cf5218-da1f-4979-a8b2-1f4c49981307" (UID: "b6cf5218-da1f-4979-a8b2-1f4c49981307"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.564090 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-config-data" (OuterVolumeSpecName: "config-data") pod "b6cf5218-da1f-4979-a8b2-1f4c49981307" (UID: "b6cf5218-da1f-4979-a8b2-1f4c49981307"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.634146 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.634182 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkzff\" (UniqueName: \"kubernetes.io/projected/b6cf5218-da1f-4979-a8b2-1f4c49981307-kube-api-access-hkzff\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.634203 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cf5218-da1f-4979-a8b2-1f4c49981307-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.670198 4922 generic.go:334] "Generic (PLEG): container finished" podID="b6cf5218-da1f-4979-a8b2-1f4c49981307" containerID="d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" exitCode=0 Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.670250 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b6cf5218-da1f-4979-a8b2-1f4c49981307","Type":"ContainerDied","Data":"d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9"} Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.670272 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.670302 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b6cf5218-da1f-4979-a8b2-1f4c49981307","Type":"ContainerDied","Data":"4a302d7aa49b23c112d5f0d3b45ca48784473e7bb1050667d946696a61c0c88b"} Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.670329 4922 scope.go:117] "RemoveContainer" containerID="d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.675532 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8026992e-7dd1-42d9-b362-82febc75c072","Type":"ContainerStarted","Data":"3480a1ee34671883eedf597274a6f64fbdd34cfd4e44a319ae58d788dace7d36"} Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.675574 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8026992e-7dd1-42d9-b362-82febc75c072","Type":"ContainerStarted","Data":"e3c85f65a2283ba365db1c99edfb8fc2be5ee33fe88ee44750524b8d792b1cbf"} Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.675693 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8026992e-7dd1-42d9-b362-82febc75c072","Type":"ContainerStarted","Data":"413b6a61bdcb727ee4cc28c78ea98a8e4a44d8830cab40b697aeb42829caa211"} Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.699783 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.699764481 podStartE2EDuration="2.699764481s" podCreationTimestamp="2025-09-29 22:49:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:49:15.69405564 +0000 UTC m=+1360.004344453" watchObservedRunningTime="2025-09-29 22:49:15.699764481 +0000 UTC m=+1360.010053304" Sep 29 
22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.717377 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.732795 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.745753 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:49:15 crc kubenswrapper[4922]: E0929 22:49:15.746074 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6cf5218-da1f-4979-a8b2-1f4c49981307" containerName="nova-scheduler-scheduler" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.746084 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6cf5218-da1f-4979-a8b2-1f4c49981307" containerName="nova-scheduler-scheduler" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.746261 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6cf5218-da1f-4979-a8b2-1f4c49981307" containerName="nova-scheduler-scheduler" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.746744 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.746816 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.766023 4922 scope.go:117] "RemoveContainer" containerID="d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.766189 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 22:49:15 crc kubenswrapper[4922]: E0929 22:49:15.766559 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9\": container with ID starting with d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9 not found: ID does not exist" containerID="d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.766592 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9"} err="failed to get container status \"d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9\": rpc error: code = NotFound desc = could not find container \"d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9\": container with ID starting with d2b5800a4a372d73889953835b2b0917da607a92979021cf10b9067cbddbabe9 not found: ID does not exist" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.862920 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-config-data\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.863217 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck5gz\" (UniqueName: \"kubernetes.io/projected/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-kube-api-access-ck5gz\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " 
pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.863255 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.965327 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-config-data\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.965472 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck5gz\" (UniqueName: \"kubernetes.io/projected/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-kube-api-access-ck5gz\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.965598 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.972905 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-config-data\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.974236 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:15 crc kubenswrapper[4922]: I0929 22:49:15.981881 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck5gz\" (UniqueName: \"kubernetes.io/projected/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-kube-api-access-ck5gz\") pod \"nova-scheduler-0\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " pod="openstack/nova-scheduler-0" Sep 29 22:49:16 crc kubenswrapper[4922]: I0929 22:49:16.083894 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:49:16 crc kubenswrapper[4922]: I0929 22:49:16.435047 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cf5218-da1f-4979-a8b2-1f4c49981307" path="/var/lib/kubelet/pods/b6cf5218-da1f-4979-a8b2-1f4c49981307/volumes" Sep 29 22:49:16 crc kubenswrapper[4922]: I0929 22:49:16.514942 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:49:16 crc kubenswrapper[4922]: W0929 22:49:16.517436 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ead89b5_3aff_47b9_9516_0eaa33dca7aa.slice/crio-98f17d11639c7e732de438cad7f9c3c5e13182daad0a9507918d564a973c72f4 WatchSource:0}: Error finding container 98f17d11639c7e732de438cad7f9c3c5e13182daad0a9507918d564a973c72f4: Status 404 returned error can't find the container with id 98f17d11639c7e732de438cad7f9c3c5e13182daad0a9507918d564a973c72f4 Sep 29 22:49:16 crc kubenswrapper[4922]: I0929 22:49:16.689991 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3ead89b5-3aff-47b9-9516-0eaa33dca7aa","Type":"ContainerStarted","Data":"98f17d11639c7e732de438cad7f9c3c5e13182daad0a9507918d564a973c72f4"} Sep 29 22:49:17 crc kubenswrapper[4922]: I0929 22:49:17.704589 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3ead89b5-3aff-47b9-9516-0eaa33dca7aa","Type":"ContainerStarted","Data":"52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c"} Sep 29 22:49:17 crc kubenswrapper[4922]: I0929 22:49:17.730124 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.730104955 podStartE2EDuration="2.730104955s" podCreationTimestamp="2025-09-29 22:49:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:49:17.725886348 +0000 UTC m=+1362.036175161" watchObservedRunningTime="2025-09-29 22:49:17.730104955 +0000 UTC m=+1362.040393778" Sep 29 22:49:19 crc kubenswrapper[4922]: I0929 22:49:19.409060 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 22:49:19 crc kubenswrapper[4922]: I0929 22:49:19.409474 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 22:49:21 crc kubenswrapper[4922]: I0929 22:49:21.084762 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 22:49:24 crc kubenswrapper[4922]: I0929 22:49:24.408800 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 22:49:24 crc kubenswrapper[4922]: I0929 22:49:24.409239 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 22:49:25 crc kubenswrapper[4922]: I0929 22:49:25.429612 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 22:49:25 crc kubenswrapper[4922]: I0929 22:49:25.429653 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" 
podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 22:49:26 crc kubenswrapper[4922]: I0929 22:49:26.084946 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 22:49:26 crc kubenswrapper[4922]: I0929 22:49:26.138131 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 22:49:26 crc kubenswrapper[4922]: I0929 22:49:26.918299 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 22:49:27 crc kubenswrapper[4922]: I0929 22:49:27.808407 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.459812 4922 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod2db43b28-b025-4538-ab26-cda934938671"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod2db43b28-b025-4538-ab26-cda934938671] : Timed out while waiting for systemd to remove kubepods-besteffort-pod2db43b28_b025_4538_ab26_cda934938671.slice" Sep 29 22:49:30 crc kubenswrapper[4922]: E0929 22:49:30.460274 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod2db43b28-b025-4538-ab26-cda934938671] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod2db43b28-b025-4538-ab26-cda934938671] : Timed out while waiting for systemd to remove kubepods-besteffort-pod2db43b28_b025_4538_ab26_cda934938671.slice" pod="openstack/nova-api-0" podUID="2db43b28-b025-4538-ab26-cda934938671" Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.912092 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.946723 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.969769 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.981892 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.984024 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.988811 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.988943 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 22:49:30 crc kubenswrapper[4922]: I0929 22:49:30.989180 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.011757 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.144994 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.145099 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d569\" (UniqueName: \"kubernetes.io/projected/391e4250-b978-4ce4-811d-ae2a81a8500f-kube-api-access-8d569\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.145200 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391e4250-b978-4ce4-811d-ae2a81a8500f-logs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.145307 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.145430 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-public-tls-certs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.145464 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-config-data\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.247136 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.248604 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-config-data\") pod \"nova-api-0\" (UID: 
\"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.248643 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-public-tls-certs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.248714 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.248804 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d569\" (UniqueName: \"kubernetes.io/projected/391e4250-b978-4ce4-811d-ae2a81a8500f-kube-api-access-8d569\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.248927 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391e4250-b978-4ce4-811d-ae2a81a8500f-logs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.250064 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391e4250-b978-4ce4-811d-ae2a81a8500f-logs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.255741 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.256491 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-public-tls-certs\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.256608 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-config-data\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.257461 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.274995 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d569\" (UniqueName: \"kubernetes.io/projected/391e4250-b978-4ce4-811d-ae2a81a8500f-kube-api-access-8d569\") pod \"nova-api-0\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " pod="openstack/nova-api-0" Sep 
29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.313374 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.834992 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:49:31 crc kubenswrapper[4922]: W0929 22:49:31.835255 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod391e4250_b978_4ce4_811d_ae2a81a8500f.slice/crio-c8eb20aadddd51f175050e57a2950a981ee6f98a098a6e3bb7dbd7640ad8550d WatchSource:0}: Error finding container c8eb20aadddd51f175050e57a2950a981ee6f98a098a6e3bb7dbd7640ad8550d: Status 404 returned error can't find the container with id c8eb20aadddd51f175050e57a2950a981ee6f98a098a6e3bb7dbd7640ad8550d Sep 29 22:49:31 crc kubenswrapper[4922]: I0929 22:49:31.925873 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"391e4250-b978-4ce4-811d-ae2a81a8500f","Type":"ContainerStarted","Data":"c8eb20aadddd51f175050e57a2950a981ee6f98a098a6e3bb7dbd7640ad8550d"} Sep 29 22:49:32 crc kubenswrapper[4922]: I0929 22:49:32.438214 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2db43b28-b025-4538-ab26-cda934938671" path="/var/lib/kubelet/pods/2db43b28-b025-4538-ab26-cda934938671/volumes" Sep 29 22:49:32 crc kubenswrapper[4922]: I0929 22:49:32.961000 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"391e4250-b978-4ce4-811d-ae2a81a8500f","Type":"ContainerStarted","Data":"19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127"} Sep 29 22:49:32 crc kubenswrapper[4922]: I0929 22:49:32.961088 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"391e4250-b978-4ce4-811d-ae2a81a8500f","Type":"ContainerStarted","Data":"6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7"} Sep 29 22:49:32 crc kubenswrapper[4922]: I0929 22:49:32.994922 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.994899682 podStartE2EDuration="2.994899682s" podCreationTimestamp="2025-09-29 22:49:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:49:32.980498266 +0000 UTC m=+1377.290787119" watchObservedRunningTime="2025-09-29 22:49:32.994899682 +0000 UTC m=+1377.305188485" Sep 29 22:49:34 crc kubenswrapper[4922]: I0929 22:49:34.414721 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 22:49:34 crc kubenswrapper[4922]: I0929 22:49:34.415280 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 22:49:34 crc kubenswrapper[4922]: I0929 22:49:34.440643 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 22:49:34 crc kubenswrapper[4922]: I0929 22:49:34.440724 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.142360 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-v2fc8"] Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.147716 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.156939 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v2fc8"] Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.242465 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-utilities\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.242781 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-catalog-content\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.242922 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgph7\" (UniqueName: \"kubernetes.io/projected/7358ce30-c4a8-48d9-84bb-a2e89c716437-kube-api-access-kgph7\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.343759 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgph7\" (UniqueName: \"kubernetes.io/projected/7358ce30-c4a8-48d9-84bb-a2e89c716437-kube-api-access-kgph7\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.343826 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-utilities\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.343925 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-catalog-content\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.344337 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-catalog-content\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.344815 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-utilities\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.374121 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-kgph7\" (UniqueName: \"kubernetes.io/projected/7358ce30-c4a8-48d9-84bb-a2e89c716437-kube-api-access-kgph7\") pod \"redhat-marketplace-v2fc8\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:40 crc kubenswrapper[4922]: I0929 22:49:40.510123 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:41 crc kubenswrapper[4922]: I0929 22:49:41.006889 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v2fc8"] Sep 29 22:49:41 crc kubenswrapper[4922]: I0929 22:49:41.055113 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v2fc8" event={"ID":"7358ce30-c4a8-48d9-84bb-a2e89c716437","Type":"ContainerStarted","Data":"3a6a0a56cb7489497abd9ac15895d779d72c8fa34bac557abf535cf5e3e5ec04"} Sep 29 22:49:41 crc kubenswrapper[4922]: I0929 22:49:41.314127 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 22:49:41 crc kubenswrapper[4922]: I0929 22:49:41.314185 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 22:49:42 crc kubenswrapper[4922]: I0929 22:49:42.072301 4922 generic.go:334] "Generic (PLEG): container finished" podID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerID="6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667" exitCode=0 Sep 29 22:49:42 crc kubenswrapper[4922]: I0929 22:49:42.072827 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v2fc8" event={"ID":"7358ce30-c4a8-48d9-84bb-a2e89c716437","Type":"ContainerDied","Data":"6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667"} Sep 29 22:49:42 crc kubenswrapper[4922]: I0929 22:49:42.079240 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 22:49:42 crc kubenswrapper[4922]: I0929 22:49:42.326599 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 22:49:42 crc kubenswrapper[4922]: I0929 22:49:42.326660 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 22:49:44 crc kubenswrapper[4922]: I0929 22:49:44.097188 4922 generic.go:334] "Generic (PLEG): container finished" podID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerID="89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c" exitCode=0 Sep 29 22:49:44 crc kubenswrapper[4922]: I0929 22:49:44.097359 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v2fc8" event={"ID":"7358ce30-c4a8-48d9-84bb-a2e89c716437","Type":"ContainerDied","Data":"89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c"} Sep 29 22:49:45 crc kubenswrapper[4922]: I0929 22:49:45.113930 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v2fc8" 
event={"ID":"7358ce30-c4a8-48d9-84bb-a2e89c716437","Type":"ContainerStarted","Data":"ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec"} Sep 29 22:49:45 crc kubenswrapper[4922]: I0929 22:49:45.142184 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-v2fc8" podStartSLOduration=2.696853855 podStartE2EDuration="5.142159445s" podCreationTimestamp="2025-09-29 22:49:40 +0000 UTC" firstStartedPulling="2025-09-29 22:49:42.078718502 +0000 UTC m=+1386.389007345" lastFinishedPulling="2025-09-29 22:49:44.524024092 +0000 UTC m=+1388.834312935" observedRunningTime="2025-09-29 22:49:45.139302593 +0000 UTC m=+1389.449591436" watchObservedRunningTime="2025-09-29 22:49:45.142159445 +0000 UTC m=+1389.452448298" Sep 29 22:49:46 crc kubenswrapper[4922]: I0929 22:49:46.924877 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wrzms"] Sep 29 22:49:46 crc kubenswrapper[4922]: I0929 22:49:46.928911 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:46 crc kubenswrapper[4922]: I0929 22:49:46.946738 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wrzms"] Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.080850 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-catalog-content\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.081174 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj44c\" (UniqueName: \"kubernetes.io/projected/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-kube-api-access-dj44c\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.081508 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-utilities\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.183109 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-utilities\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.183204 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-catalog-content\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.183244 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj44c\" (UniqueName: 
\"kubernetes.io/projected/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-kube-api-access-dj44c\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.183790 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-catalog-content\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.183996 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-utilities\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.207296 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj44c\" (UniqueName: \"kubernetes.io/projected/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-kube-api-access-dj44c\") pod \"certified-operators-wrzms\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.265674 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:47 crc kubenswrapper[4922]: I0929 22:49:47.738739 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wrzms"] Sep 29 22:49:47 crc kubenswrapper[4922]: W0929 22:49:47.746436 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7ce5da3_8c23_42a3_b2c6_f4a7a21c21af.slice/crio-a23ae850f2c468063fe19f0d32d09bca1d31e845941736fc065b18ea9e58f9b7 WatchSource:0}: Error finding container a23ae850f2c468063fe19f0d32d09bca1d31e845941736fc065b18ea9e58f9b7: Status 404 returned error can't find the container with id a23ae850f2c468063fe19f0d32d09bca1d31e845941736fc065b18ea9e58f9b7 Sep 29 22:49:48 crc kubenswrapper[4922]: I0929 22:49:48.166253 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerID="12e01f0de67ea47b5a75abcca4644ca8540a5a31fda9069fa43727ce2f66f65c" exitCode=0 Sep 29 22:49:48 crc kubenswrapper[4922]: I0929 22:49:48.166690 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrzms" event={"ID":"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af","Type":"ContainerDied","Data":"12e01f0de67ea47b5a75abcca4644ca8540a5a31fda9069fa43727ce2f66f65c"} Sep 29 22:49:48 crc kubenswrapper[4922]: I0929 22:49:48.166732 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrzms" event={"ID":"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af","Type":"ContainerStarted","Data":"a23ae850f2c468063fe19f0d32d09bca1d31e845941736fc065b18ea9e58f9b7"} Sep 29 22:49:49 crc kubenswrapper[4922]: I0929 22:49:49.180795 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrzms" event={"ID":"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af","Type":"ContainerStarted","Data":"09d3c9961fe51a94cbb67e638d69f1c953c77fd3416065e7764e6030f423c392"} Sep 29 22:49:50 crc 
kubenswrapper[4922]: I0929 22:49:50.196193 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerID="09d3c9961fe51a94cbb67e638d69f1c953c77fd3416065e7764e6030f423c392" exitCode=0 Sep 29 22:49:50 crc kubenswrapper[4922]: I0929 22:49:50.196252 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrzms" event={"ID":"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af","Type":"ContainerDied","Data":"09d3c9961fe51a94cbb67e638d69f1c953c77fd3416065e7764e6030f423c392"} Sep 29 22:49:50 crc kubenswrapper[4922]: I0929 22:49:50.510607 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:50 crc kubenswrapper[4922]: I0929 22:49:50.510721 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:50 crc kubenswrapper[4922]: I0929 22:49:50.601764 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:51 crc kubenswrapper[4922]: I0929 22:49:51.206894 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrzms" event={"ID":"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af","Type":"ContainerStarted","Data":"b6b11d4dde3e719e51ce11f3cf8d847d44fae5dc131176f622d3feba1b377a2b"} Sep 29 22:49:51 crc kubenswrapper[4922]: I0929 22:49:51.245914 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wrzms" podStartSLOduration=2.77129762 podStartE2EDuration="5.245888474s" podCreationTimestamp="2025-09-29 22:49:46 +0000 UTC" firstStartedPulling="2025-09-29 22:49:48.169448921 +0000 UTC m=+1392.479737734" lastFinishedPulling="2025-09-29 22:49:50.644039745 +0000 UTC m=+1394.954328588" observedRunningTime="2025-09-29 22:49:51.229607231 +0000 UTC m=+1395.539896074" watchObservedRunningTime="2025-09-29 22:49:51.245888474 +0000 UTC m=+1395.556177297" Sep 29 22:49:51 crc kubenswrapper[4922]: I0929 22:49:51.253377 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:51 crc kubenswrapper[4922]: I0929 22:49:51.321770 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 22:49:51 crc kubenswrapper[4922]: I0929 22:49:51.322658 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 22:49:51 crc kubenswrapper[4922]: I0929 22:49:51.322718 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 22:49:51 crc kubenswrapper[4922]: I0929 22:49:51.329365 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 22:49:52 crc kubenswrapper[4922]: I0929 22:49:52.217495 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 22:49:52 crc kubenswrapper[4922]: I0929 22:49:52.226522 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 22:49:52 crc kubenswrapper[4922]: I0929 22:49:52.889808 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v2fc8"] Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.229289 4922 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/redhat-marketplace-v2fc8" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="registry-server" containerID="cri-o://ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec" gracePeriod=2 Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.735560 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.924630 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgph7\" (UniqueName: \"kubernetes.io/projected/7358ce30-c4a8-48d9-84bb-a2e89c716437-kube-api-access-kgph7\") pod \"7358ce30-c4a8-48d9-84bb-a2e89c716437\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.924754 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-catalog-content\") pod \"7358ce30-c4a8-48d9-84bb-a2e89c716437\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.924937 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-utilities\") pod \"7358ce30-c4a8-48d9-84bb-a2e89c716437\" (UID: \"7358ce30-c4a8-48d9-84bb-a2e89c716437\") " Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.926784 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-utilities" (OuterVolumeSpecName: "utilities") pod "7358ce30-c4a8-48d9-84bb-a2e89c716437" (UID: "7358ce30-c4a8-48d9-84bb-a2e89c716437"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.933561 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7358ce30-c4a8-48d9-84bb-a2e89c716437-kube-api-access-kgph7" (OuterVolumeSpecName: "kube-api-access-kgph7") pod "7358ce30-c4a8-48d9-84bb-a2e89c716437" (UID: "7358ce30-c4a8-48d9-84bb-a2e89c716437"). InnerVolumeSpecName "kube-api-access-kgph7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:49:53 crc kubenswrapper[4922]: I0929 22:49:53.951235 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7358ce30-c4a8-48d9-84bb-a2e89c716437" (UID: "7358ce30-c4a8-48d9-84bb-a2e89c716437"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.027706 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgph7\" (UniqueName: \"kubernetes.io/projected/7358ce30-c4a8-48d9-84bb-a2e89c716437-kube-api-access-kgph7\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.027754 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.027777 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7358ce30-c4a8-48d9-84bb-a2e89c716437-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.249515 4922 generic.go:334] "Generic (PLEG): container finished" podID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerID="ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec" exitCode=0 Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.249601 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v2fc8" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.249595 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v2fc8" event={"ID":"7358ce30-c4a8-48d9-84bb-a2e89c716437","Type":"ContainerDied","Data":"ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec"} Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.249776 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v2fc8" event={"ID":"7358ce30-c4a8-48d9-84bb-a2e89c716437","Type":"ContainerDied","Data":"3a6a0a56cb7489497abd9ac15895d779d72c8fa34bac557abf535cf5e3e5ec04"} Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.249811 4922 scope.go:117] "RemoveContainer" containerID="ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.289509 4922 scope.go:117] "RemoveContainer" containerID="89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.307356 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v2fc8"] Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.320124 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-v2fc8"] Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.325491 4922 scope.go:117] "RemoveContainer" containerID="6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.380161 4922 scope.go:117] "RemoveContainer" containerID="ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec" Sep 29 22:49:54 crc kubenswrapper[4922]: E0929 22:49:54.380845 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec\": container with ID starting with ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec not found: ID does not exist" containerID="ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.380930 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec"} err="failed to get container status \"ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec\": rpc error: code = NotFound desc = could not find container \"ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec\": container with ID starting with ba2b243068b0177e7ccb24f39db10f288408703e4e8e8e426b3ae351857335ec not found: ID does not exist" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.380985 4922 scope.go:117] "RemoveContainer" containerID="89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c" Sep 29 22:49:54 crc kubenswrapper[4922]: E0929 22:49:54.381509 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c\": container with ID starting with 89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c not found: ID does not exist" containerID="89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.381589 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c"} err="failed to get container status \"89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c\": rpc error: code = NotFound desc = could not find container \"89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c\": container with ID starting with 89391149649adb8d9e1dfdc4434ff7510b338237261f2eb3474d297f45425a6c not found: ID does not exist" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.381640 4922 scope.go:117] "RemoveContainer" containerID="6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667" Sep 29 22:49:54 crc kubenswrapper[4922]: E0929 22:49:54.382379 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667\": container with ID starting with 6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667 not found: ID does not exist" containerID="6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.382502 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667"} err="failed to get container status \"6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667\": rpc error: code = NotFound desc = could not find container \"6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667\": container with ID starting with 6695f536ad7f09fd0609bd3223254f319bdcd4b66d259279cf7a6f65e92b3667 not found: ID does not exist" Sep 29 22:49:54 crc kubenswrapper[4922]: I0929 22:49:54.437095 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" path="/var/lib/kubelet/pods/7358ce30-c4a8-48d9-84bb-a2e89c716437/volumes" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.697252 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-675b6"] Sep 29 22:49:56 crc kubenswrapper[4922]: E0929 22:49:56.697873 4922 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="registry-server" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.697886 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="registry-server" Sep 29 22:49:56 crc kubenswrapper[4922]: E0929 22:49:56.697898 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="extract-utilities" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.697905 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="extract-utilities" Sep 29 22:49:56 crc kubenswrapper[4922]: E0929 22:49:56.697932 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="extract-content" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.697940 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="extract-content" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.698106 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7358ce30-c4a8-48d9-84bb-a2e89c716437" containerName="registry-server" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.699327 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.728460 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-675b6"] Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.897002 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-catalog-content\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.897048 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ph69\" (UniqueName: \"kubernetes.io/projected/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-kube-api-access-2ph69\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.897094 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-utilities\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.998720 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-catalog-content\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.999104 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ph69\" (UniqueName: \"kubernetes.io/projected/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-kube-api-access-2ph69\") pod \"redhat-operators-675b6\" 
(UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.999141 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-utilities\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.999158 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-catalog-content\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:56 crc kubenswrapper[4922]: I0929 22:49:56.999416 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-utilities\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:57 crc kubenswrapper[4922]: I0929 22:49:57.044129 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ph69\" (UniqueName: \"kubernetes.io/projected/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-kube-api-access-2ph69\") pod \"redhat-operators-675b6\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:57 crc kubenswrapper[4922]: I0929 22:49:57.265934 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:57 crc kubenswrapper[4922]: I0929 22:49:57.266118 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:57 crc kubenswrapper[4922]: I0929 22:49:57.317790 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:57 crc kubenswrapper[4922]: I0929 22:49:57.334754 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:49:57 crc kubenswrapper[4922]: I0929 22:49:57.764944 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-675b6"] Sep 29 22:49:58 crc kubenswrapper[4922]: I0929 22:49:58.299601 4922 generic.go:334] "Generic (PLEG): container finished" podID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerID="3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d" exitCode=0 Sep 29 22:49:58 crc kubenswrapper[4922]: I0929 22:49:58.299664 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-675b6" event={"ID":"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4","Type":"ContainerDied","Data":"3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d"} Sep 29 22:49:58 crc kubenswrapper[4922]: I0929 22:49:58.300257 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-675b6" event={"ID":"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4","Type":"ContainerStarted","Data":"8731da116886e3e96b7d31d68f3073f7ca6624ba5166c009a99e71ce542cb329"} Sep 29 22:49:58 crc kubenswrapper[4922]: I0929 22:49:58.388758 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:49:58 crc kubenswrapper[4922]: I0929 22:49:58.913822 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:49:58 crc kubenswrapper[4922]: I0929 22:49:58.913906 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:49:59 crc kubenswrapper[4922]: I0929 22:49:59.314643 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-675b6" event={"ID":"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4","Type":"ContainerStarted","Data":"1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c"} Sep 29 22:49:59 crc kubenswrapper[4922]: I0929 22:49:59.698744 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wrzms"] Sep 29 22:50:00 crc kubenswrapper[4922]: I0929 22:50:00.333100 4922 generic.go:334] "Generic (PLEG): container finished" podID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerID="1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c" exitCode=0 Sep 29 22:50:00 crc kubenswrapper[4922]: I0929 22:50:00.333165 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-675b6" event={"ID":"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4","Type":"ContainerDied","Data":"1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c"} Sep 29 22:50:00 crc kubenswrapper[4922]: I0929 22:50:00.333942 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wrzms" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="registry-server" containerID="cri-o://b6b11d4dde3e719e51ce11f3cf8d847d44fae5dc131176f622d3feba1b377a2b" gracePeriod=2 Sep 29 22:50:01 crc 
kubenswrapper[4922]: I0929 22:50:01.351234 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-675b6" event={"ID":"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4","Type":"ContainerStarted","Data":"780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd"} Sep 29 22:50:01 crc kubenswrapper[4922]: I0929 22:50:01.398794 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-675b6" podStartSLOduration=2.865243581 podStartE2EDuration="5.398758808s" podCreationTimestamp="2025-09-29 22:49:56 +0000 UTC" firstStartedPulling="2025-09-29 22:49:58.302182325 +0000 UTC m=+1402.612471178" lastFinishedPulling="2025-09-29 22:50:00.835697592 +0000 UTC m=+1405.145986405" observedRunningTime="2025-09-29 22:50:01.379053309 +0000 UTC m=+1405.689342162" watchObservedRunningTime="2025-09-29 22:50:01.398758808 +0000 UTC m=+1405.709047661" Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.372670 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerID="b6b11d4dde3e719e51ce11f3cf8d847d44fae5dc131176f622d3feba1b377a2b" exitCode=0 Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.372737 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrzms" event={"ID":"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af","Type":"ContainerDied","Data":"b6b11d4dde3e719e51ce11f3cf8d847d44fae5dc131176f622d3feba1b377a2b"} Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.722614 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.824602 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-utilities\") pod \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.824733 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-catalog-content\") pod \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.824885 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj44c\" (UniqueName: \"kubernetes.io/projected/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-kube-api-access-dj44c\") pod \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\" (UID: \"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af\") " Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.825977 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-utilities" (OuterVolumeSpecName: "utilities") pod "e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" (UID: "e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.834776 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-kube-api-access-dj44c" (OuterVolumeSpecName: "kube-api-access-dj44c") pod "e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" (UID: "e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af"). InnerVolumeSpecName "kube-api-access-dj44c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.901342 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" (UID: "e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.927122 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.927161 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:02 crc kubenswrapper[4922]: I0929 22:50:02.927178 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj44c\" (UniqueName: \"kubernetes.io/projected/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af-kube-api-access-dj44c\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:03 crc kubenswrapper[4922]: I0929 22:50:03.392694 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wrzms" event={"ID":"e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af","Type":"ContainerDied","Data":"a23ae850f2c468063fe19f0d32d09bca1d31e845941736fc065b18ea9e58f9b7"} Sep 29 22:50:03 crc kubenswrapper[4922]: I0929 22:50:03.392982 4922 scope.go:117] "RemoveContainer" containerID="b6b11d4dde3e719e51ce11f3cf8d847d44fae5dc131176f622d3feba1b377a2b" Sep 29 22:50:03 crc kubenswrapper[4922]: I0929 22:50:03.392798 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wrzms" Sep 29 22:50:03 crc kubenswrapper[4922]: I0929 22:50:03.434049 4922 scope.go:117] "RemoveContainer" containerID="09d3c9961fe51a94cbb67e638d69f1c953c77fd3416065e7764e6030f423c392" Sep 29 22:50:03 crc kubenswrapper[4922]: I0929 22:50:03.440414 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wrzms"] Sep 29 22:50:03 crc kubenswrapper[4922]: I0929 22:50:03.452492 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wrzms"] Sep 29 22:50:03 crc kubenswrapper[4922]: I0929 22:50:03.471969 4922 scope.go:117] "RemoveContainer" containerID="12e01f0de67ea47b5a75abcca4644ca8540a5a31fda9069fa43727ce2f66f65c" Sep 29 22:50:04 crc kubenswrapper[4922]: I0929 22:50:04.442761 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" path="/var/lib/kubelet/pods/e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af/volumes" Sep 29 22:50:07 crc kubenswrapper[4922]: I0929 22:50:07.335262 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:50:07 crc kubenswrapper[4922]: I0929 22:50:07.335996 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:50:07 crc kubenswrapper[4922]: I0929 22:50:07.423094 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:50:07 crc kubenswrapper[4922]: I0929 22:50:07.554062 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:50:07 crc kubenswrapper[4922]: I0929 22:50:07.699961 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-675b6"] Sep 29 22:50:09 crc kubenswrapper[4922]: I0929 22:50:09.487339 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-675b6" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="registry-server" containerID="cri-o://780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd" gracePeriod=2 Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.028550 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.173827 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ph69\" (UniqueName: \"kubernetes.io/projected/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-kube-api-access-2ph69\") pod \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.174059 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-utilities\") pod \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.174116 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-catalog-content\") pod \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\" (UID: \"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4\") " Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.176110 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-utilities" (OuterVolumeSpecName: "utilities") pod "b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" (UID: "b03b2ebb-6e24-4e66-999c-ecd0468a8bd4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.181807 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-kube-api-access-2ph69" (OuterVolumeSpecName: "kube-api-access-2ph69") pod "b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" (UID: "b03b2ebb-6e24-4e66-999c-ecd0468a8bd4"). InnerVolumeSpecName "kube-api-access-2ph69". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.271787 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" (UID: "b03b2ebb-6e24-4e66-999c-ecd0468a8bd4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.276633 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ph69\" (UniqueName: \"kubernetes.io/projected/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-kube-api-access-2ph69\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.276660 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.276671 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.501196 4922 generic.go:334] "Generic (PLEG): container finished" podID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerID="780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd" exitCode=0 Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.501250 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-675b6" event={"ID":"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4","Type":"ContainerDied","Data":"780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd"} Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.501294 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-675b6" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.502480 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-675b6" event={"ID":"b03b2ebb-6e24-4e66-999c-ecd0468a8bd4","Type":"ContainerDied","Data":"8731da116886e3e96b7d31d68f3073f7ca6624ba5166c009a99e71ce542cb329"} Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.502678 4922 scope.go:117] "RemoveContainer" containerID="780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.534584 4922 scope.go:117] "RemoveContainer" containerID="1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.538383 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-675b6"] Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.547198 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-675b6"] Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.567296 4922 scope.go:117] "RemoveContainer" containerID="3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.607278 4922 scope.go:117] "RemoveContainer" containerID="780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd" Sep 29 22:50:10 crc kubenswrapper[4922]: E0929 22:50:10.607986 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd\": container with ID starting with 780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd not found: ID does not exist" containerID="780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.608072 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd"} err="failed to get container status \"780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd\": rpc error: code = NotFound desc = could not find container \"780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd\": container with ID starting with 780425a142f892f65bb61d9c8e72e314559b1478fa4043d9bf03439a4b0e4abd not found: ID does not exist" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.608160 4922 scope.go:117] "RemoveContainer" containerID="1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c" Sep 29 22:50:10 crc kubenswrapper[4922]: E0929 22:50:10.608678 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c\": container with ID starting with 1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c not found: ID does not exist" containerID="1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.608741 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c"} err="failed to get container status \"1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c\": rpc error: code = NotFound desc = could not find container \"1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c\": container with ID starting with 1a8aa3f76f570287cde09fab6c20aa1942f9b24cc1445d0fada1b7aa5a64df7c not found: ID does not exist" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.608766 4922 scope.go:117] "RemoveContainer" containerID="3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d" Sep 29 22:50:10 crc kubenswrapper[4922]: E0929 22:50:10.609267 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d\": container with ID starting with 3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d not found: ID does not exist" containerID="3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d" Sep 29 22:50:10 crc kubenswrapper[4922]: I0929 22:50:10.609462 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d"} err="failed to get container status \"3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d\": rpc error: code = NotFound desc = could not find container \"3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d\": container with ID starting with 3f2f203e79d5b7aeffcd6cada85106a7dee85ce5ba1e00906c9f3d34b1e7c28d not found: ID does not exist" Sep 29 22:50:12 crc kubenswrapper[4922]: I0929 22:50:12.441222 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" path="/var/lib/kubelet/pods/b03b2ebb-6e24-4e66-999c-ecd0468a8bd4/volumes" Sep 29 22:50:13 crc kubenswrapper[4922]: I0929 22:50:13.810115 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 29 22:50:13 crc kubenswrapper[4922]: I0929 22:50:13.810381 4922 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/openstackclient" podUID="25c9b137-8a15-477d-b87a-b4480c856551" containerName="openstackclient" containerID="cri-o://e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0" gracePeriod=2 Sep 29 22:50:13 crc kubenswrapper[4922]: I0929 22:50:13.826998 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.075185 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.134781 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron818d-account-delete-xtx9k"] Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.135167 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="extract-content" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135181 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="extract-content" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.135195 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="registry-server" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135201 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="registry-server" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.135210 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="extract-utilities" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135216 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="extract-utilities" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.135229 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="registry-server" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135234 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="registry-server" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.135264 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c9b137-8a15-477d-b87a-b4480c856551" containerName="openstackclient" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135272 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c9b137-8a15-477d-b87a-b4480c856551" containerName="openstackclient" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.135290 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="extract-content" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135297 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="extract-content" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.135316 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="extract-utilities" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135324 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="extract-utilities" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135521 4922 
memory_manager.go:354] "RemoveStaleState removing state" podUID="25c9b137-8a15-477d-b87a-b4480c856551" containerName="openstackclient" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135539 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7ce5da3-8c23-42a3-b2c6-f4a7a21c21af" containerName="registry-server" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.135553 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b03b2ebb-6e24-4e66-999c-ecd0468a8bd4" containerName="registry-server" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.136259 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron818d-account-delete-xtx9k" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.155079 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron818d-account-delete-xtx9k"] Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.155999 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.156056 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data podName:cab5f5be-6bdd-481b-a07b-08491f6f2be5 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:14.656039168 +0000 UTC m=+1418.966327981 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data") pod "rabbitmq-cell1-server-0" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5") : configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.166439 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.181022 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.181657 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="openstack-network-exporter" containerID="cri-o://0cb28827e71d50623944eacc3ac38f3ced5298c3beaa26d7f6d6c9b64829ff1d" gracePeriod=300 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.234542 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance185c-account-delete-4n6ff"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.235804 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance185c-account-delete-4n6ff" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.252351 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance185c-account-delete-4n6ff"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.256928 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vwhq\" (UniqueName: \"kubernetes.io/projected/0082d9b7-4b81-47ca-8ba7-61429fdcc678-kube-api-access-5vwhq\") pod \"neutron818d-account-delete-xtx9k\" (UID: \"0082d9b7-4b81-47ca-8ba7-61429fdcc678\") " pod="openstack/neutron818d-account-delete-xtx9k" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.257949 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.258006 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data podName:e56d31de-64f5-42a7-8243-7ac6d992a03d nodeName:}" failed. No retries permitted until 2025-09-29 22:50:14.757988913 +0000 UTC m=+1419.068277726 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data") pod "rabbitmq-server-0" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d") : configmap "rabbitmq-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.279429 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="ovsdbserver-nb" containerID="cri-o://f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d" gracePeriod=300 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.364199 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmwjx\" (UniqueName: \"kubernetes.io/projected/a38d4f6b-b91d-4bb8-9e78-54261e6a285e-kube-api-access-wmwjx\") pod \"glance185c-account-delete-4n6ff\" (UID: \"a38d4f6b-b91d-4bb8-9e78-54261e6a285e\") " pod="openstack/glance185c-account-delete-4n6ff" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.364445 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vwhq\" (UniqueName: \"kubernetes.io/projected/0082d9b7-4b81-47ca-8ba7-61429fdcc678-kube-api-access-5vwhq\") pod \"neutron818d-account-delete-xtx9k\" (UID: \"0082d9b7-4b81-47ca-8ba7-61429fdcc678\") " pod="openstack/neutron818d-account-delete-xtx9k" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.384686 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vwhq\" (UniqueName: \"kubernetes.io/projected/0082d9b7-4b81-47ca-8ba7-61429fdcc678-kube-api-access-5vwhq\") pod \"neutron818d-account-delete-xtx9k\" (UID: \"0082d9b7-4b81-47ca-8ba7-61429fdcc678\") " pod="openstack/neutron818d-account-delete-xtx9k" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.465706 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmwjx\" (UniqueName: \"kubernetes.io/projected/a38d4f6b-b91d-4bb8-9e78-54261e6a285e-kube-api-access-wmwjx\") pod \"glance185c-account-delete-4n6ff\" (UID: \"a38d4f6b-b91d-4bb8-9e78-54261e6a285e\") " pod="openstack/glance185c-account-delete-4n6ff" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 
22:50:14.466478 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron818d-account-delete-xtx9k" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.534746 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmwjx\" (UniqueName: \"kubernetes.io/projected/a38d4f6b-b91d-4bb8-9e78-54261e6a285e-kube-api-access-wmwjx\") pod \"glance185c-account-delete-4n6ff\" (UID: \"a38d4f6b-b91d-4bb8-9e78-54261e6a285e\") " pod="openstack/glance185c-account-delete-4n6ff" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.559668 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement66ef-account-delete-nbn4v"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.566098 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement66ef-account-delete-nbn4v" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.572607 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance185c-account-delete-4n6ff" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.594404 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bce38540-6796-48b5-82e7-aad30cf98841/ovsdbserver-nb/0.log" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.594450 4922 generic.go:334] "Generic (PLEG): container finished" podID="bce38540-6796-48b5-82e7-aad30cf98841" containerID="0cb28827e71d50623944eacc3ac38f3ced5298c3beaa26d7f6d6c9b64829ff1d" exitCode=2 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.594467 4922 generic.go:334] "Generic (PLEG): container finished" podID="bce38540-6796-48b5-82e7-aad30cf98841" containerID="f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d" exitCode=143 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.594487 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce38540-6796-48b5-82e7-aad30cf98841","Type":"ContainerDied","Data":"0cb28827e71d50623944eacc3ac38f3ced5298c3beaa26d7f6d6c9b64829ff1d"} Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.594509 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"bce38540-6796-48b5-82e7-aad30cf98841","Type":"ContainerDied","Data":"f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d"} Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.597605 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement66ef-account-delete-nbn4v"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.621604 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.622006 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="openstack-network-exporter" containerID="cri-o://de3b093fd1fce51dc98ae3bc522017ff5ef8c0b5fedc8d245e4289a030aa5618" gracePeriod=300 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.692297 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7q6q\" (UniqueName: \"kubernetes.io/projected/b2c58690-3113-44b8-b2df-cbe69dbd26e3-kube-api-access-b7q6q\") pod \"placement66ef-account-delete-nbn4v\" (UID: \"b2c58690-3113-44b8-b2df-cbe69dbd26e3\") " pod="openstack/placement66ef-account-delete-nbn4v" Sep 29 22:50:14 crc 
kubenswrapper[4922]: E0929 22:50:14.692504 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.692548 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data podName:cab5f5be-6bdd-481b-a07b-08491f6f2be5 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:15.692531631 +0000 UTC m=+1420.002820444 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data") pod "rabbitmq-cell1-server-0" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5") : configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.712003 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-b7zw5"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.728535 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-b7zw5"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.735917 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="ovsdbserver-sb" containerID="cri-o://2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97" gracePeriod=300 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.746191 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-6qjb4"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.758806 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-6qjb4"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.769190 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.769478 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="ovn-northd" containerID="cri-o://372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" gracePeriod=30 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.769927 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="openstack-network-exporter" containerID="cri-o://18271197116a64d48cb8446e8bb69a2a9e1aea53d596b37826cbb2a61e257443" gracePeriod=30 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.782414 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.796650 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7q6q\" (UniqueName: \"kubernetes.io/projected/b2c58690-3113-44b8-b2df-cbe69dbd26e3-kube-api-access-b7q6q\") pod \"placement66ef-account-delete-nbn4v\" (UID: \"b2c58690-3113-44b8-b2df-cbe69dbd26e3\") " pod="openstack/placement66ef-account-delete-nbn4v" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.796757 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.796824 4922 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data podName:e56d31de-64f5-42a7-8243-7ac6d992a03d nodeName:}" failed. No retries permitted until 2025-09-29 22:50:15.796806905 +0000 UTC m=+1420.107095718 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data") pod "rabbitmq-server-0" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d") : configmap "rabbitmq-config-data" not found Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.797250 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-vc9v7"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.797540 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-vc9v7" podUID="0f689cf2-292c-47a9-936d-57954d187f5d" containerName="openstack-network-exporter" containerID="cri-o://3db7f086a726daa10073df95a7a2a04b8afb9e0774984976907105555406f660" gracePeriod=30 Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.832120 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-nrr6k"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.879099 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7q6q\" (UniqueName: \"kubernetes.io/projected/b2c58690-3113-44b8-b2df-cbe69dbd26e3-kube-api-access-b7q6q\") pod \"placement66ef-account-delete-nbn4v\" (UID: \"b2c58690-3113-44b8-b2df-cbe69dbd26e3\") " pod="openstack/placement66ef-account-delete-nbn4v" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.904146 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell04139-account-delete-b4vm7"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.905267 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell04139-account-delete-b4vm7" Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.948659 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell04139-account-delete-b4vm7"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.966114 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement66ef-account-delete-nbn4v" Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.983002 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d is running failed: container process not found" containerID="f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 29 22:50:14 crc kubenswrapper[4922]: I0929 22:50:14.993367 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerName="rabbitmq" containerID="cri-o://36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd" gracePeriod=604800 Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.993574 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d is running failed: container process not found" containerID="f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.997833 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d is running failed: container process not found" containerID="f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 29 22:50:14 crc kubenswrapper[4922]: E0929 22:50:14.997876 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="ovsdbserver-nb" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.009305 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cm7m\" (UniqueName: \"kubernetes.io/projected/cd700631-7b12-4e93-9e40-747b09623e7e-kube-api-access-7cm7m\") pod \"novacell04139-account-delete-b4vm7\" (UID: \"cd700631-7b12-4e93-9e40-747b09623e7e\") " pod="openstack/novacell04139-account-delete-b4vm7" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.034330 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-4jkkx"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.091156 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell1fcf1-account-delete-vxvzv"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.113295 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell1fcf1-account-delete-vxvzv" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.123986 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cm7m\" (UniqueName: \"kubernetes.io/projected/cd700631-7b12-4e93-9e40-747b09623e7e-kube-api-access-7cm7m\") pod \"novacell04139-account-delete-b4vm7\" (UID: \"cd700631-7b12-4e93-9e40-747b09623e7e\") " pod="openstack/novacell04139-account-delete-b4vm7" Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.142080 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97 is running failed: container process not found" containerID="2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.143475 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell1fcf1-account-delete-vxvzv"] Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.143994 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97 is running failed: container process not found" containerID="2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.159423 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97 is running failed: container process not found" containerID="2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97" cmd=["/usr/bin/pidof","ovsdb-server"] Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.182747 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="ovsdbserver-sb" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.223925 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.233714 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27gf5\" (UniqueName: \"kubernetes.io/projected/d0a467a9-053f-4f41-b6b2-529130d42122-kube-api-access-27gf5\") pod \"novacell1fcf1-account-delete-vxvzv\" (UID: \"d0a467a9-053f-4f41-b6b2-529130d42122\") " pod="openstack/novacell1fcf1-account-delete-vxvzv" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.271070 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bce38540-6796-48b5-82e7-aad30cf98841/ovsdbserver-nb/0.log" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.271133 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.281258 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cm7m\" (UniqueName: \"kubernetes.io/projected/cd700631-7b12-4e93-9e40-747b09623e7e-kube-api-access-7cm7m\") pod \"novacell04139-account-delete-b4vm7\" (UID: \"cd700631-7b12-4e93-9e40-747b09623e7e\") " pod="openstack/novacell04139-account-delete-b4vm7" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.300760 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerName="rabbitmq" containerID="cri-o://b6074f8dda50ed5b4ce98889541af126fde4d515d920458ec2ced51aad77f19d" gracePeriod=604800 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.344594 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-scripts\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.347466 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-scripts" (OuterVolumeSpecName: "scripts") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.347487 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-combined-ca-bundle\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.347713 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.347812 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-metrics-certs-tls-certs\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.347879 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce38540-6796-48b5-82e7-aad30cf98841-ovsdb-rundir\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.347959 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-config\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.348089 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-946jd\" (UniqueName: 
\"kubernetes.io/projected/bce38540-6796-48b5-82e7-aad30cf98841-kube-api-access-946jd\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.348179 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-ovsdbserver-nb-tls-certs\") pod \"bce38540-6796-48b5-82e7-aad30cf98841\" (UID: \"bce38540-6796-48b5-82e7-aad30cf98841\") " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.348557 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27gf5\" (UniqueName: \"kubernetes.io/projected/d0a467a9-053f-4f41-b6b2-529130d42122-kube-api-access-27gf5\") pod \"novacell1fcf1-account-delete-vxvzv\" (UID: \"d0a467a9-053f-4f41-b6b2-529130d42122\") " pod="openstack/novacell1fcf1-account-delete-vxvzv" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.348943 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.355039 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-config" (OuterVolumeSpecName: "config") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.364687 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bce38540-6796-48b5-82e7-aad30cf98841-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.376448 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-prv7k"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.394810 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.420837 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-prv7k"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.421853 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27gf5\" (UniqueName: \"kubernetes.io/projected/d0a467a9-053f-4f41-b6b2-529130d42122-kube-api-access-27gf5\") pod \"novacell1fcf1-account-delete-vxvzv\" (UID: \"d0a467a9-053f-4f41-b6b2-529130d42122\") " pod="openstack/novacell1fcf1-account-delete-vxvzv" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.431620 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bce38540-6796-48b5-82e7-aad30cf98841-kube-api-access-946jd" (OuterVolumeSpecName: "kube-api-access-946jd") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "kube-api-access-946jd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.458042 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.460688 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell1fcf1-account-delete-vxvzv" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.460818 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bce38540-6796-48b5-82e7-aad30cf98841-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.460881 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bce38540-6796-48b5-82e7-aad30cf98841-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.461782 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-946jd\" (UniqueName: \"kubernetes.io/projected/bce38540-6796-48b5-82e7-aad30cf98841-kube-api-access-946jd\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.475120 4922 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" secret="" err="secret \"dnsmasq-dns-dockercfg-2xnvb\" not found" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.499596 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.522564 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-782mg"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.536618 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell04139-account-delete-b4vm7" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.563770 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-782mg"] Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.563900 4922 configmap.go:193] Couldn't get configMap openstack/dns-svc: configmap "dns-svc" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.563955 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:16.063938376 +0000 UTC m=+1420.374227189 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "dns-svc" not found Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.564034 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.564152 4922 configmap.go:193] Couldn't get configMap openstack/ovsdbserver-nb: configmap "ovsdbserver-nb" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.564180 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:16.064173052 +0000 UTC m=+1420.374461865 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "ovsdbserver-nb" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.564207 4922 configmap.go:193] Couldn't get configMap openstack/dns: configmap "dns" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.564224 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:16.064218683 +0000 UTC m=+1420.374507496 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "dns" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.565745 4922 configmap.go:193] Couldn't get configMap openstack/ovsdbserver-sb: configmap "ovsdbserver-sb" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.566076 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:16.06605787 +0000 UTC m=+1420.376346683 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "ovsdbserver-sb" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "ovsdbserver-sb" not found Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.576849 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-zvnng"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.591360 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-zvnng"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.607215 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron818d-account-delete-xtx9k"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.641449 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-656896d5d5-fczbx"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.641737 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-656896d5d5-fczbx" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-api" containerID="cri-o://ee6bfbd535ddb2568b60ca0420863ffa93c242860084bfd9305faf5ae6f7c154" gracePeriod=30 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.645632 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-656896d5d5-fczbx" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-httpd" containerID="cri-o://0cb082da33df2e4d81994a52b8d0e177856277b29b39509b8cec8831f4d69eb3" gracePeriod=30 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.657539 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kl2pv"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.675690 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.676139 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-log" containerID="cri-o://ccbfc95659bd8ac0f5153a3e15fe5796f00cb82dbede7bd19b2c1d755699bbe7" gracePeriod=30 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.676279 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-httpd" containerID="cri-o://4bade6528d890c812a2f6c25a51fb063416552fbc042c3c5effa4226f2415177" gracePeriod=30 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.684897 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.690200 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-mqg7f"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.707122 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.778648 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.778911 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.778982 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.779055 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data podName:cab5f5be-6bdd-481b-a07b-08491f6f2be5 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:17.779038239 +0000 UTC m=+1422.089327052 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data") pod "rabbitmq-cell1-server-0" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5") : configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.779508 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.795647 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.798126 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-vc9v7_0f689cf2-292c-47a9-936d-57954d187f5d/openstack-network-exporter/0.log" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.798172 4922 generic.go:334] "Generic (PLEG): container finished" podID="0f689cf2-292c-47a9-936d-57954d187f5d" containerID="3db7f086a726daa10073df95a7a2a04b8afb9e0774984976907105555406f660" exitCode=2 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.798285 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vc9v7" event={"ID":"0f689cf2-292c-47a9-936d-57954d187f5d","Type":"ContainerDied","Data":"3db7f086a726daa10073df95a7a2a04b8afb9e0774984976907105555406f660"} Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.811321 4922 generic.go:334] "Generic (PLEG): container finished" podID="014a5aba-d41a-4647-8459-c770534a4a60" containerID="18271197116a64d48cb8446e8bb69a2a9e1aea53d596b37826cbb2a61e257443" exitCode=2 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.811467 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"014a5aba-d41a-4647-8459-c770534a4a60","Type":"ContainerDied","Data":"18271197116a64d48cb8446e8bb69a2a9e1aea53d596b37826cbb2a61e257443"} Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.811547 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.811604 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="ovn-northd" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.871878 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_bce38540-6796-48b5-82e7-aad30cf98841/ovsdbserver-nb/0.log" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.871962 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"bce38540-6796-48b5-82e7-aad30cf98841","Type":"ContainerDied","Data":"af4d8dd2788db06ecbf18e8b922a9ac7b1fed4d998dfaa887d1dff24df9810d9"} Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.872007 4922 scope.go:117] "RemoveContainer" containerID="0cb28827e71d50623944eacc3ac38f3ced5298c3beaa26d7f6d6c9b64829ff1d" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.872248 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.884648 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 29 22:50:15 crc kubenswrapper[4922]: E0929 22:50:15.884720 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data podName:e56d31de-64f5-42a7-8243-7ac6d992a03d nodeName:}" failed. No retries permitted until 2025-09-29 22:50:17.884697138 +0000 UTC m=+1422.194985951 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data") pod "rabbitmq-server-0" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d") : configmap "rabbitmq-config-data" not found Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.897129 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_4a7323e3-8b0e-4f74-b0f4-73c5874fe361/ovsdbserver-sb/0.log" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.897174 4922 generic.go:334] "Generic (PLEG): container finished" podID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerID="de3b093fd1fce51dc98ae3bc522017ff5ef8c0b5fedc8d245e4289a030aa5618" exitCode=2 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.897190 4922 generic.go:334] "Generic (PLEG): container finished" podID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerID="2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97" exitCode=143 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.897272 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"4a7323e3-8b0e-4f74-b0f4-73c5874fe361","Type":"ContainerDied","Data":"de3b093fd1fce51dc98ae3bc522017ff5ef8c0b5fedc8d245e4289a030aa5618"} Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.897296 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"4a7323e3-8b0e-4f74-b0f4-73c5874fe361","Type":"ContainerDied","Data":"2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97"} Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.903499 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-mqg7f"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.946739 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-n4p4t"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.951792 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" podUID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerName="dnsmasq-dns" containerID="cri-o://bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c" gracePeriod=10 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.952157 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron818d-account-delete-xtx9k" 
event={"ID":"0082d9b7-4b81-47ca-8ba7-61429fdcc678","Type":"ContainerStarted","Data":"e76daf748777017a5acd06b97290f6a97953429821f9c26e34b0a82de51481fc"} Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.958906 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-n4p4t"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.963744 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-vc9v7_0f689cf2-292c-47a9-936d-57954d187f5d/openstack-network-exporter/0.log" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.963811 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.977781 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.978045 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-log" containerID="cri-o://f260bc8fa0de7751b280365c6bd0a0f523435c779c5725e4de6df27a0478f19c" gracePeriod=30 Sep 29 22:50:15 crc kubenswrapper[4922]: I0929 22:50:15.978234 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-httpd" containerID="cri-o://e9c29c8849b943f694f98cfb6baeef978eb44c7f2f718c45a1d5e25db0e098e6" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.996549 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.996944 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-server" containerID="cri-o://6e52d87702c312bbb2e29a490519b8aa109bb12950e8b0a94d326f1b63f93999" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997048 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="swift-recon-cron" containerID="cri-o://d96b9721a809407b59045e31403c469338494e50e97df48dd1a0aa74503cb5bd" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997088 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="rsync" containerID="cri-o://27aeadd45b13c851d87c45f05a21adf10459ae93d03fae69b6ab3347a3cd7d2b" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997115 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-expirer" containerID="cri-o://9c2b949ae2010cd19044ec6c16936ffd099b6ea65673b3704a021c3323514b40" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997142 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-updater" containerID="cri-o://e41c7b951d5523f493d3e44c422eb2f476674ac694ba6e3a443bb314f8068bab" gracePeriod=30 Sep 29 22:50:16 crc 
kubenswrapper[4922]: I0929 22:50:15.997169 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-auditor" containerID="cri-o://1f217165de12b63c91e1fbd871ad07d3070b8407a3d9750bde397f3c7a1cc356" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997195 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-replicator" containerID="cri-o://7dc1ecd4e9d792ad830b6b3cddec0aca87a6fc32dfad2067e4fa602b228af523" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997223 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-server" containerID="cri-o://6c064216482398df313773fb9964e1b8586650597558efb0a3a312e7dde29596" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997250 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-updater" containerID="cri-o://54464affff56c245302f16939d7871865704b43ae97eb183cd35b66f93385f35" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997277 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-auditor" containerID="cri-o://aed3f79c8434a0f0105df5fe72412ca9bc5f53d2f122d6b27023c5a8f5c61342" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997304 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-replicator" containerID="cri-o://e249fe7a191a944cff40c8c92e3c4958f89cf9fbd1f5d1322ff75e0f69defdff" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997331 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-server" containerID="cri-o://4675d2c0679cc4f58a6d8737c63a65ad973c3433c64759dcea3d5deff22e30fb" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997357 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-reaper" containerID="cri-o://f166640120faeaa707308969f390573411f97a3309e54ac63df05aebb3f19824" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997383 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-auditor" containerID="cri-o://394ac56913d9a5c9d5e8f0211780ebf922fd0554782e59a3d6d87d16da29195d" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:15.997428 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-replicator" containerID="cri-o://d2f084fa2f64aff150659598e27fe358fc89e0c61c6100a7520978fcf0f7a916" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.008327 
4922 scope.go:117] "RemoveContainer" containerID="f3dfa7a7bf4ce46b67b550b97ad8df60421887eac8a172dc0038df397071251d" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.016855 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-9lrwb"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.038936 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-9lrwb"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.075662 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-sxbsj"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.093411 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-combined-ca-bundle\") pod \"0f689cf2-292c-47a9-936d-57954d187f5d\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.093519 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f689cf2-292c-47a9-936d-57954d187f5d-config\") pod \"0f689cf2-292c-47a9-936d-57954d187f5d\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.093590 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnpgq\" (UniqueName: \"kubernetes.io/projected/0f689cf2-292c-47a9-936d-57954d187f5d-kube-api-access-hnpgq\") pod \"0f689cf2-292c-47a9-936d-57954d187f5d\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.093610 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovs-rundir\") pod \"0f689cf2-292c-47a9-936d-57954d187f5d\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.093641 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovn-rundir\") pod \"0f689cf2-292c-47a9-936d-57954d187f5d\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.095811 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-metrics-certs-tls-certs\") pod \"0f689cf2-292c-47a9-936d-57954d187f5d\" (UID: \"0f689cf2-292c-47a9-936d-57954d187f5d\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.096452 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "0f689cf2-292c-47a9-936d-57954d187f5d" (UID: "0f689cf2-292c-47a9-936d-57954d187f5d"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.096707 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "0f689cf2-292c-47a9-936d-57954d187f5d" (UID: "0f689cf2-292c-47a9-936d-57954d187f5d"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.097048 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f689cf2-292c-47a9-936d-57954d187f5d-config" (OuterVolumeSpecName: "config") pod "0f689cf2-292c-47a9-936d-57954d187f5d" (UID: "0f689cf2-292c-47a9-936d-57954d187f5d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.098552 4922 configmap.go:193] Couldn't get configMap openstack/ovsdbserver-sb: configmap "ovsdbserver-sb" not found Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.098610 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:17.098594371 +0000 UTC m=+1421.408883184 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "ovsdbserver-sb" not found Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.098865 4922 configmap.go:193] Couldn't get configMap openstack/dns-svc: configmap "dns-svc" not found Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.098894 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:17.098886159 +0000 UTC m=+1421.409174972 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "dns-svc" not found Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.101850 4922 configmap.go:193] Couldn't get configMap openstack/ovsdbserver-nb: configmap "ovsdbserver-nb" not found Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.101912 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:17.101894435 +0000 UTC m=+1421.412183248 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "ovsdbserver-nb" not found Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.102207 4922 configmap.go:193] Couldn't get configMap openstack/dns: configmap "dns" not found Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.102271 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config podName:a41c5e41-6db6-44dc-989d-d7a8ed8ae091 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:17.102254354 +0000 UTC m=+1421.412543157 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config") pod "dnsmasq-dns-5c7b6c5df9-kl2pv" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091") : configmap "dns" not found Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.103367 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f689cf2-292c-47a9-936d-57954d187f5d-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.103431 4922 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovs-rundir\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.103440 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/0f689cf2-292c-47a9-936d-57954d187f5d-ovn-rundir\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.103487 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-sxbsj"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.126786 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-185c-account-create-6bgh5"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.136968 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f689cf2-292c-47a9-936d-57954d187f5d-kube-api-access-hnpgq" (OuterVolumeSpecName: "kube-api-access-hnpgq") pod "0f689cf2-292c-47a9-936d-57954d187f5d" (UID: "0f689cf2-292c-47a9-936d-57954d187f5d"). InnerVolumeSpecName "kube-api-access-hnpgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.141490 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "bce38540-6796-48b5-82e7-aad30cf98841" (UID: "bce38540-6796-48b5-82e7-aad30cf98841"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.149187 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-185c-account-create-6bgh5"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.163346 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-7pc4b"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.178517 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-7pc4b"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.196378 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance185c-account-delete-4n6ff"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.210010 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bce38540-6796-48b5-82e7-aad30cf98841-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.210041 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnpgq\" (UniqueName: \"kubernetes.io/projected/0f689cf2-292c-47a9-936d-57954d187f5d-kube-api-access-hnpgq\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.236014 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-3d37-account-create-9lzmk"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.246040 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-3d37-account-create-9lzmk"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.247131 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_4a7323e3-8b0e-4f74-b0f4-73c5874fe361/ovsdbserver-sb/0.log" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.247196 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.260967 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.261204 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="cinder-scheduler" containerID="cri-o://70b0fa4952e40bc0e0d7fd5d77a22557f9abd49c9cf9a6a2477a7399f2433c1f" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.261330 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="probe" containerID="cri-o://db4826cac698823aad07ded4c68c796267271768ac310ffcee02df8874d50b96" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.271249 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f689cf2-292c-47a9-936d-57954d187f5d" (UID: "0f689cf2-292c-47a9-936d-57954d187f5d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.305285 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.305562 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api-log" containerID="cri-o://1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.305684 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api" containerID="cri-o://e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.309870 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" containerID="cri-o://cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" gracePeriod=29 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.320453 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.327232 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-5d7mn"] Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.360645 4922 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Sep 29 22:50:16 crc kubenswrapper[4922]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 29 22:50:16 crc kubenswrapper[4922]: + source /usr/local/bin/container-scripts/functions Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNBridge=br-int Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNRemote=tcp:localhost:6642 Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNEncapType=geneve Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNAvailabilityZones= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ EnableChassisAsGateway=true Sep 29 22:50:16 crc kubenswrapper[4922]: ++ PhysicalNetworks= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNHostName= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 29 22:50:16 crc kubenswrapper[4922]: ++ ovs_dir=/var/lib/openvswitch Sep 29 22:50:16 crc kubenswrapper[4922]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 29 22:50:16 crc kubenswrapper[4922]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 29 22:50:16 crc kubenswrapper[4922]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 29 22:50:16 crc kubenswrapper[4922]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 29 22:50:16 crc kubenswrapper[4922]: + sleep 0.5 Sep 29 22:50:16 crc kubenswrapper[4922]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 29 22:50:16 crc kubenswrapper[4922]: + cleanup_ovsdb_server_semaphore Sep 29 22:50:16 crc kubenswrapper[4922]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 29 22:50:16 crc kubenswrapper[4922]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 29 22:50:16 crc kubenswrapper[4922]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-4jkkx" message=< Sep 29 22:50:16 crc kubenswrapper[4922]: Exiting ovsdb-server (5) [ OK ] Sep 29 22:50:16 crc kubenswrapper[4922]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 29 22:50:16 crc kubenswrapper[4922]: + source /usr/local/bin/container-scripts/functions Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNBridge=br-int Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNRemote=tcp:localhost:6642 Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNEncapType=geneve Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNAvailabilityZones= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ EnableChassisAsGateway=true Sep 29 22:50:16 crc kubenswrapper[4922]: ++ PhysicalNetworks= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNHostName= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 29 22:50:16 crc kubenswrapper[4922]: ++ ovs_dir=/var/lib/openvswitch Sep 29 22:50:16 crc kubenswrapper[4922]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 29 22:50:16 crc kubenswrapper[4922]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 29 22:50:16 crc kubenswrapper[4922]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 29 22:50:16 crc kubenswrapper[4922]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 29 22:50:16 crc kubenswrapper[4922]: + sleep 0.5 Sep 29 22:50:16 crc kubenswrapper[4922]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 29 22:50:16 crc kubenswrapper[4922]: + cleanup_ovsdb_server_semaphore Sep 29 22:50:16 crc kubenswrapper[4922]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 29 22:50:16 crc kubenswrapper[4922]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 29 22:50:16 crc kubenswrapper[4922]: > Sep 29 22:50:16 crc kubenswrapper[4922]: E0929 22:50:16.360699 4922 kuberuntime_container.go:691] "PreStop hook failed" err=< Sep 29 22:50:16 crc kubenswrapper[4922]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Sep 29 22:50:16 crc kubenswrapper[4922]: + source /usr/local/bin/container-scripts/functions Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNBridge=br-int Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNRemote=tcp:localhost:6642 Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNEncapType=geneve Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNAvailabilityZones= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ EnableChassisAsGateway=true Sep 29 22:50:16 crc kubenswrapper[4922]: ++ PhysicalNetworks= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ OVNHostName= Sep 29 22:50:16 crc kubenswrapper[4922]: ++ DB_FILE=/etc/openvswitch/conf.db Sep 29 22:50:16 crc kubenswrapper[4922]: ++ ovs_dir=/var/lib/openvswitch Sep 29 22:50:16 crc kubenswrapper[4922]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Sep 29 22:50:16 crc kubenswrapper[4922]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Sep 29 22:50:16 crc kubenswrapper[4922]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 29 22:50:16 crc kubenswrapper[4922]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 29 22:50:16 crc kubenswrapper[4922]: + sleep 0.5 Sep 29 22:50:16 crc kubenswrapper[4922]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Sep 29 22:50:16 crc kubenswrapper[4922]: + cleanup_ovsdb_server_semaphore Sep 29 22:50:16 crc kubenswrapper[4922]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Sep 29 22:50:16 crc kubenswrapper[4922]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Sep 29 22:50:16 crc kubenswrapper[4922]: > pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" containerID="cri-o://2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.360731 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" containerID="cri-o://2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" gracePeriod=29 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.363655 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-5d7mn"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.374636 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-3b06-account-create-4k8fh"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.396013 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-3b06-account-create-4k8fh"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.405347 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-66ef-account-create-tv2kg"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.412306 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-66ef-account-create-tv2kg"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.419498 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-556f68d56-jxmlq"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.419773 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-556f68d56-jxmlq" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-log" containerID="cri-o://b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.420168 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-556f68d56-jxmlq" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-api" containerID="cri-o://114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431380 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdbserver-sb-tls-certs\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431434 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgxfx\" (UniqueName: \"kubernetes.io/projected/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-kube-api-access-qgxfx\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431470 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" 
(UniqueName: \"kubernetes.io/empty-dir/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdb-rundir\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431503 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-metrics-certs-tls-certs\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431526 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431585 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-combined-ca-bundle\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431605 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-config\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.431683 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-scripts\") pod \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\" (UID: \"4a7323e3-8b0e-4f74-b0f4-73c5874fe361\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.432802 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-scripts" (OuterVolumeSpecName: "scripts") pod "4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.443219 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.444184 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.444637 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-config" (OuterVolumeSpecName: "config") pod "4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.480107 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-kube-api-access-qgxfx" (OuterVolumeSpecName: "kube-api-access-qgxfx") pod "4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). InnerVolumeSpecName "kube-api-access-qgxfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.549668 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.549702 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgxfx\" (UniqueName: \"kubernetes.io/projected/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-kube-api-access-qgxfx\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.549713 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.549745 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.549757 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.567914 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="191228de-91b1-48d9-a5ac-48846dddf3ed" path="/var/lib/kubelet/pods/191228de-91b1-48d9-a5ac-48846dddf3ed/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.579545 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dc887c5-8fe3-46a0-af31-64c0b95dfcbf" path="/var/lib/kubelet/pods/2dc887c5-8fe3-46a0-af31-64c0b95dfcbf/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.580277 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43650a1d-3702-40e1-b4ef-2cc2f2343c28" path="/var/lib/kubelet/pods/43650a1d-3702-40e1-b4ef-2cc2f2343c28/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.583146 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43fe2c56-fd4b-4e01-9550-49d15df8264f" path="/var/lib/kubelet/pods/43fe2c56-fd4b-4e01-9550-49d15df8264f/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.584140 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45cdffc3-9762-433a-b1d8-7e6af001c0ec" path="/var/lib/kubelet/pods/45cdffc3-9762-433a-b1d8-7e6af001c0ec/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.584938 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c673b83-04df-456d-b26c-6384ecfb5924" path="/var/lib/kubelet/pods/7c673b83-04df-456d-b26c-6384ecfb5924/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.587520 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8880043f-1bff-4e22-9f42-06d44ad027f8" 
path="/var/lib/kubelet/pods/8880043f-1bff-4e22-9f42-06d44ad027f8/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.589786 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90cbdc4e-53d3-4732-9239-6e2a46dcd4b0" path="/var/lib/kubelet/pods/90cbdc4e-53d3-4732-9239-6e2a46dcd4b0/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.590919 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dc3f0ae-3193-44e1-9bad-edbbd00a94ea" path="/var/lib/kubelet/pods/9dc3f0ae-3193-44e1-9bad-edbbd00a94ea/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.591571 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c069ee2e-ba38-4d12-8090-81842b86051a" path="/var/lib/kubelet/pods/c069ee2e-ba38-4d12-8090-81842b86051a/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.592118 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb59351d-3f5c-457d-a010-a5f48104cd03" path="/var/lib/kubelet/pods/cb59351d-3f5c-457d-a010-a5f48104cd03/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.598263 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3d2683e-4488-4dc2-8362-75b9068ce7e6" path="/var/lib/kubelet/pods/e3d2683e-4488-4dc2-8362-75b9068ce7e6/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.598971 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4df9702-b583-4c89-8412-c99de320208c" path="/var/lib/kubelet/pods/e4df9702-b583-4c89-8412-c99de320208c/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.599528 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f219fe95-4c9e-46af-b01c-f39503f1ca4e" path="/var/lib/kubelet/pods/f219fe95-4c9e-46af-b01c-f39503f1ca4e/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.604189 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a17d6c-65b4-4006-8d06-9942b01955d1" path="/var/lib/kubelet/pods/f7a17d6c-65b4-4006-8d06-9942b01955d1/volumes" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.620493 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement66ef-account-delete-nbn4v"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.620534 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9hwh7"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.620549 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9hwh7"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.620564 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.620586 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.620597 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance185c-account-delete-4n6ff"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.620606 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.621129 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-log" containerID="cri-o://e3c85f65a2283ba365db1c99edfb8fc2be5ee33fe88ee44750524b8d792b1cbf" gracePeriod=30 Sep 29 
22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.621378 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-metadata" containerID="cri-o://3480a1ee34671883eedf597274a6f64fbdd34cfd4e44a319ae58d788dace7d36" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.648301 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.648590 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.649147 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-log" containerID="cri-o://6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.649649 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-api" containerID="cri-o://19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.662728 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-pl8xb"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.672951 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-pl8xb"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.764545 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "0f689cf2-292c-47a9-936d-57954d187f5d" (UID: "0f689cf2-292c-47a9-936d-57954d187f5d"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.764599 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-9bbc-account-create-jjb4q"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.775962 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f689cf2-292c-47a9-936d-57954d187f5d-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.793938 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.794010 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-9bbc-account-create-jjb4q"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.811788 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). 
InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.821818 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.859442 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-8cxz7"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.873912 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.882362 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.883223 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-8cxz7"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.890590 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.890619 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.890633 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.900102 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-fcf1-account-create-cdp2t"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.917025 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell1fcf1-account-delete-vxvzv"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.929650 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-8f78686f5-pj8pr"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.929926 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-8f78686f5-pj8pr" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker-log" containerID="cri-o://8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.930340 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-8f78686f5-pj8pr" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker" containerID="cri-o://71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.939492 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-fcf1-account-create-cdp2t"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.949184 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod 
"4a7323e3-8b0e-4f74-b0f4-73c5874fe361" (UID: "4a7323e3-8b0e-4f74-b0f4-73c5874fe361"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.953868 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5fc58fc6cf-b5wq2"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.954204 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-httpd" containerID="cri-o://108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.954340 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-server" containerID="cri-o://68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.968001 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.968642 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener-log" containerID="cri-o://a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.969078 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener" containerID="cri-o://3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.981600 4922 generic.go:334] "Generic (PLEG): container finished" podID="0082d9b7-4b81-47ca-8ba7-61429fdcc678" containerID="a619712671e1ee8156ae552fb5f6eff0cfb3c1fee263e8762a56b0dafcc7ed41" exitCode=0 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.982033 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron818d-account-delete-xtx9k" event={"ID":"0082d9b7-4b81-47ca-8ba7-61429fdcc678","Type":"ContainerDied","Data":"a619712671e1ee8156ae552fb5f6eff0cfb3c1fee263e8762a56b0dafcc7ed41"} Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.988423 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-75fb76f858-mmqwn"] Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.988733 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-75fb76f858-mmqwn" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api-log" containerID="cri-o://172f258b5cd1d2a007268349f6866d6235b2344ef34219c5f1c44b3260c97af7" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.988869 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-75fb76f858-mmqwn" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api" containerID="cri-o://6900ea52bd3a44fd3677b19a3a356664f86ce5fb715b38eadb63dcaaa0a2a2c5" gracePeriod=30 Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992047 
4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffzjt\" (UniqueName: \"kubernetes.io/projected/25c9b137-8a15-477d-b87a-b4480c856551-kube-api-access-ffzjt\") pod \"25c9b137-8a15-477d-b87a-b4480c856551\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992122 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-swift-storage-0\") pod \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992225 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb\") pod \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992284 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc\") pod \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992309 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-combined-ca-bundle\") pod \"25c9b137-8a15-477d-b87a-b4480c856551\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992347 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47dtk\" (UniqueName: \"kubernetes.io/projected/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-kube-api-access-47dtk\") pod \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992407 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config\") pod \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992454 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config\") pod \"25c9b137-8a15-477d-b87a-b4480c856551\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992495 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config-secret\") pod \"25c9b137-8a15-477d-b87a-b4480c856551\" (UID: \"25c9b137-8a15-477d-b87a-b4480c856551\") " Sep 29 22:50:16 crc kubenswrapper[4922]: I0929 22:50:16.992570 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb\") pod \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\" (UID: \"a41c5e41-6db6-44dc-989d-d7a8ed8ae091\") " Sep 29 22:50:16 crc kubenswrapper[4922]: 
I0929 22:50:16.992980 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a7323e3-8b0e-4f74-b0f4-73c5874fe361-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.003782 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerName="galera" containerID="cri-o://db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59" gracePeriod=30 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.004907 4922 generic.go:334] "Generic (PLEG): container finished" podID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerID="bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.008954 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.010228 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" event={"ID":"a41c5e41-6db6-44dc-989d-d7a8ed8ae091","Type":"ContainerDied","Data":"bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.010300 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kl2pv" event={"ID":"a41c5e41-6db6-44dc-989d-d7a8ed8ae091","Type":"ContainerDied","Data":"f26577b05af0203a7b02b4066c4acd21a533001ed1bbdc676fb82713899454f9"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.010319 4922 scope.go:117] "RemoveContainer" containerID="bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.017306 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c9b137-8a15-477d-b87a-b4480c856551-kube-api-access-ffzjt" (OuterVolumeSpecName: "kube-api-access-ffzjt") pod "25c9b137-8a15-477d-b87a-b4480c856551" (UID: "25c9b137-8a15-477d-b87a-b4480c856551"). InnerVolumeSpecName "kube-api-access-ffzjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.018545 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019628 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="27aeadd45b13c851d87c45f05a21adf10459ae93d03fae69b6ab3347a3cd7d2b" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019653 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="9c2b949ae2010cd19044ec6c16936ffd099b6ea65673b3704a021c3323514b40" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019661 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="e41c7b951d5523f493d3e44c422eb2f476674ac694ba6e3a443bb314f8068bab" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019668 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="1f217165de12b63c91e1fbd871ad07d3070b8407a3d9750bde397f3c7a1cc356" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019676 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="7dc1ecd4e9d792ad830b6b3cddec0aca87a6fc32dfad2067e4fa602b228af523" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019683 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="6c064216482398df313773fb9964e1b8586650597558efb0a3a312e7dde29596" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019691 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="54464affff56c245302f16939d7871865704b43ae97eb183cd35b66f93385f35" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019697 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="aed3f79c8434a0f0105df5fe72412ca9bc5f53d2f122d6b27023c5a8f5c61342" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019703 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="e249fe7a191a944cff40c8c92e3c4958f89cf9fbd1f5d1322ff75e0f69defdff" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019710 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="4675d2c0679cc4f58a6d8737c63a65ad973c3433c64759dcea3d5deff22e30fb" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019716 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="f166640120faeaa707308969f390573411f97a3309e54ac63df05aebb3f19824" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019724 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="394ac56913d9a5c9d5e8f0211780ebf922fd0554782e59a3d6d87d16da29195d" exitCode=0 Sep 29 
22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019730 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="d2f084fa2f64aff150659598e27fe358fc89e0c61c6100a7520978fcf0f7a916" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019738 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="6e52d87702c312bbb2e29a490519b8aa109bb12950e8b0a94d326f1b63f93999" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019814 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"27aeadd45b13c851d87c45f05a21adf10459ae93d03fae69b6ab3347a3cd7d2b"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019843 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"9c2b949ae2010cd19044ec6c16936ffd099b6ea65673b3704a021c3323514b40"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019854 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"e41c7b951d5523f493d3e44c422eb2f476674ac694ba6e3a443bb314f8068bab"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019863 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"1f217165de12b63c91e1fbd871ad07d3070b8407a3d9750bde397f3c7a1cc356"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019871 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"7dc1ecd4e9d792ad830b6b3cddec0aca87a6fc32dfad2067e4fa602b228af523"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019879 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"6c064216482398df313773fb9964e1b8586650597558efb0a3a312e7dde29596"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019887 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"54464affff56c245302f16939d7871865704b43ae97eb183cd35b66f93385f35"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019896 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"aed3f79c8434a0f0105df5fe72412ca9bc5f53d2f122d6b27023c5a8f5c61342"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019905 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"e249fe7a191a944cff40c8c92e3c4958f89cf9fbd1f5d1322ff75e0f69defdff"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019913 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"4675d2c0679cc4f58a6d8737c63a65ad973c3433c64759dcea3d5deff22e30fb"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 
22:50:17.019921 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"f166640120faeaa707308969f390573411f97a3309e54ac63df05aebb3f19824"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019929 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"394ac56913d9a5c9d5e8f0211780ebf922fd0554782e59a3d6d87d16da29195d"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019938 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"d2f084fa2f64aff150659598e27fe358fc89e0c61c6100a7520978fcf0f7a916"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.019946 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"6e52d87702c312bbb2e29a490519b8aa109bb12950e8b0a94d326f1b63f93999"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.026081 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.027812 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="dfe7291a-aae6-4a8f-9f46-fa4594582dfe" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd" gracePeriod=30 Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.028709 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.030531 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance185c-account-delete-4n6ff" event={"ID":"a38d4f6b-b91d-4bb8-9e78-54261e6a285e","Type":"ContainerStarted","Data":"35821848b818e802e86617c5121310d840d01aea88919928aae150a9cd91e8d8"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.030655 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance185c-account-delete-4n6ff" podUID="a38d4f6b-b91d-4bb8-9e78-54261e6a285e" containerName="mariadb-account-delete" containerID="cri-o://19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d" gracePeriod=30 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.038241 4922 generic.go:334] "Generic (PLEG): container finished" podID="cb84f99c-6d00-4023-9520-372992f3646e" containerID="0cb082da33df2e4d81994a52b8d0e177856277b29b39509b8cec8831f4d69eb3" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.039155 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-656896d5d5-fczbx" event={"ID":"cb84f99c-6d00-4023-9520-372992f3646e","Type":"ContainerDied","Data":"0cb082da33df2e4d81994a52b8d0e177856277b29b39509b8cec8831f4d69eb3"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.039243 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-x2kgq"] Sep 29 22:50:17 crc 
kubenswrapper[4922]: I0929 22:50:17.052740 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.052958 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="bf1c4a85-458f-4412-ae77-af6d87370b62" containerName="nova-cell1-conductor-conductor" containerID="cri-o://f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198" gracePeriod=30 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.059645 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-kube-api-access-47dtk" (OuterVolumeSpecName: "kube-api-access-47dtk") pod "a41c5e41-6db6-44dc-989d-d7a8ed8ae091" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091"). InnerVolumeSpecName "kube-api-access-47dtk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.060130 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-x2kgq"] Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.061043 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.061097 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerName="galera" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.063294 4922 generic.go:334] "Generic (PLEG): container finished" podID="5b8254ca-83c1-49a8-b453-107577b54f01" containerID="f260bc8fa0de7751b280365c6bd0a0f523435c779c5725e4de6df27a0478f19c" exitCode=143 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.063352 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5b8254ca-83c1-49a8-b453-107577b54f01","Type":"ContainerDied","Data":"f260bc8fa0de7751b280365c6bd0a0f523435c779c5725e4de6df27a0478f19c"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.069914 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lrzqc"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.076136 4922 generic.go:334] "Generic (PLEG): container finished" podID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerID="1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe" exitCode=143 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.076193 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8788e59c-0cd3-43c5-8591-d452f9cb083a","Type":"ContainerDied","Data":"1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.079961 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement66ef-account-delete-nbn4v" event={"ID":"b2c58690-3113-44b8-b2df-cbe69dbd26e3","Type":"ContainerStarted","Data":"106615ede107b186d81c3f18b4b721a7ab45623ea532d1f0aeb6c2bb1bb4b773"} Sep 29 
22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.081692 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-lrzqc"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.088168 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.088338 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" containerName="nova-cell0-conductor-conductor" containerID="cri-o://03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" gracePeriod=30 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.089931 4922 generic.go:334] "Generic (PLEG): container finished" podID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerID="6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7" exitCode=143 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.089979 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"391e4250-b978-4ce4-811d-ae2a81a8500f","Type":"ContainerDied","Data":"6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.092827 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.093140 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3ead89b5-3aff-47b9-9516-0eaa33dca7aa" containerName="nova-scheduler-scheduler" containerID="cri-o://52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" gracePeriod=30 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.099442 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement66ef-account-delete-nbn4v"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.105339 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffzjt\" (UniqueName: \"kubernetes.io/projected/25c9b137-8a15-477d-b87a-b4480c856551-kube-api-access-ffzjt\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.105484 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47dtk\" (UniqueName: \"kubernetes.io/projected/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-kube-api-access-47dtk\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.114642 4922 generic.go:334] "Generic (PLEG): container finished" podID="51f81c86-8f6d-4506-a940-5015032df5bd" containerID="b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7" exitCode=143 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.114770 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-556f68d56-jxmlq" event={"ID":"51f81c86-8f6d-4506-a940-5015032df5bd","Type":"ContainerDied","Data":"b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.131310 4922 scope.go:117] "RemoveContainer" containerID="9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.139698 4922 generic.go:334] "Generic (PLEG): container finished" podID="8026992e-7dd1-42d9-b362-82febc75c072" containerID="e3c85f65a2283ba365db1c99edfb8fc2be5ee33fe88ee44750524b8d792b1cbf" exitCode=143 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 
22:50:17.139801 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8026992e-7dd1-42d9-b362-82febc75c072","Type":"ContainerDied","Data":"e3c85f65a2283ba365db1c99edfb8fc2be5ee33fe88ee44750524b8d792b1cbf"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.143864 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell1fcf1-account-delete-vxvzv"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.156100 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance185c-account-delete-4n6ff" podStartSLOduration=3.156078954 podStartE2EDuration="3.156078954s" podCreationTimestamp="2025-09-29 22:50:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 22:50:17.069652593 +0000 UTC m=+1421.379941406" watchObservedRunningTime="2025-09-29 22:50:17.156078954 +0000 UTC m=+1421.466367767" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.166091 4922 generic.go:334] "Generic (PLEG): container finished" podID="25c9b137-8a15-477d-b87a-b4480c856551" containerID="e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0" exitCode=137 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.166219 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.196822 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "25c9b137-8a15-477d-b87a-b4480c856551" (UID: "25c9b137-8a15-477d-b87a-b4480c856551"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.203182 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25c9b137-8a15-477d-b87a-b4480c856551" (UID: "25c9b137-8a15-477d-b87a-b4480c856551"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.211605 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.211640 4922 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.221887 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell04139-account-delete-b4vm7"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.226768 4922 generic.go:334] "Generic (PLEG): container finished" podID="4594a140-3321-4a34-ab35-65ad3560b085" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" exitCode=0 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.226829 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerDied","Data":"2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.236733 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_4a7323e3-8b0e-4f74-b0f4-73c5874fe361/ovsdbserver-sb/0.log" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.236795 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"4a7323e3-8b0e-4f74-b0f4-73c5874fe361","Type":"ContainerDied","Data":"92ca24d9521be6c461634b27176ea969e7a6aa3578044b9212d071a9ed1689ce"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.236919 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.244452 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-vc9v7_0f689cf2-292c-47a9-936d-57954d187f5d/openstack-network-exporter/0.log" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.244619 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-vc9v7" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.245497 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vc9v7" event={"ID":"0f689cf2-292c-47a9-936d-57954d187f5d","Type":"ContainerDied","Data":"85f78abe9162ea5dece822f6e0983f34c387e430c739b4cbba9410e3ac7c7769"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.262022 4922 generic.go:334] "Generic (PLEG): container finished" podID="447099dc-1eea-4510-8b94-faa6899f6b06" containerID="ccbfc95659bd8ac0f5153a3e15fe5796f00cb82dbede7bd19b2c1d755699bbe7" exitCode=143 Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.262061 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"447099dc-1eea-4510-8b94-faa6899f6b06","Type":"ContainerDied","Data":"ccbfc95659bd8ac0f5153a3e15fe5796f00cb82dbede7bd19b2c1d755699bbe7"} Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.264896 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a41c5e41-6db6-44dc-989d-d7a8ed8ae091" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.314442 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.317989 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.327352 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.334643 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-vc9v7"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.338661 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-vc9v7"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.381528 4922 scope.go:117] "RemoveContainer" containerID="bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c" Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.382660 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c\": container with ID starting with bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c not found: ID does not exist" containerID="bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.382688 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c"} err="failed to get container status \"bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c\": rpc error: code = NotFound desc = could not find container \"bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c\": container with ID starting with bdf377595e39aeb69d27673b4a600bd8df6e137fae11707c9a65e3481e37698c not found: ID does not exist" Sep 29 22:50:17 crc 
kubenswrapper[4922]: I0929 22:50:17.382711 4922 scope.go:117] "RemoveContainer" containerID="9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6" Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.383153 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6\": container with ID starting with 9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6 not found: ID does not exist" containerID="9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.383175 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6"} err="failed to get container status \"9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6\": rpc error: code = NotFound desc = could not find container \"9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6\": container with ID starting with 9cefbbe047183823c773cbedb8d553c227a04852e1cdc3affa8b25f8c30607c6 not found: ID does not exist" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.383192 4922 scope.go:117] "RemoveContainer" containerID="e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.389167 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a41c5e41-6db6-44dc-989d-d7a8ed8ae091" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.426875 4922 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.467119 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a41c5e41-6db6-44dc-989d-d7a8ed8ae091" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.525639 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config" (OuterVolumeSpecName: "config") pod "a41c5e41-6db6-44dc-989d-d7a8ed8ae091" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.526427 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a41c5e41-6db6-44dc-989d-d7a8ed8ae091" (UID: "a41c5e41-6db6-44dc-989d-d7a8ed8ae091"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.528635 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.528656 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.528665 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a41c5e41-6db6-44dc-989d-d7a8ed8ae091-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.618863 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "25c9b137-8a15-477d-b87a-b4480c856551" (UID: "25c9b137-8a15-477d-b87a-b4480c856551"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.642744 4922 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/25c9b137-8a15-477d-b87a-b4480c856551-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.845851 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.845905 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data podName:cab5f5be-6bdd-481b-a07b-08491f6f2be5 nodeName:}" failed. No retries permitted until 2025-09-29 22:50:21.845891984 +0000 UTC m=+1426.156180787 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data") pod "rabbitmq-cell1-server-0" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5") : configmap "rabbitmq-cell1-config-data" not found Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.853152 4922 scope.go:117] "RemoveContainer" containerID="e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0" Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.853685 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0\": container with ID starting with e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0 not found: ID does not exist" containerID="e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.853731 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0"} err="failed to get container status \"e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0\": rpc error: code = NotFound desc = could not find container \"e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0\": container with ID starting with e832d33d807f8522fd995aaa412a36c8ef46674042577e60bb26ba5e574c0fb0 not found: ID does not exist" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.853759 4922 scope.go:117] "RemoveContainer" containerID="de3b093fd1fce51dc98ae3bc522017ff5ef8c0b5fedc8d245e4289a030aa5618" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.868601 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance185c-account-delete-4n6ff" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.884805 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kl2pv"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.890022 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kl2pv"] Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.917789 4922 scope.go:117] "RemoveContainer" containerID="2816bb52bd16842038fd32d1dd77065026a2fad79d313d6f6b02d9198b741f97" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.947068 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmwjx\" (UniqueName: \"kubernetes.io/projected/a38d4f6b-b91d-4bb8-9e78-54261e6a285e-kube-api-access-wmwjx\") pod \"a38d4f6b-b91d-4bb8-9e78-54261e6a285e\" (UID: \"a38d4f6b-b91d-4bb8-9e78-54261e6a285e\") " Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.947673 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 29 22:50:17 crc kubenswrapper[4922]: E0929 22:50:17.947723 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data podName:e56d31de-64f5-42a7-8243-7ac6d992a03d nodeName:}" failed. No retries permitted until 2025-09-29 22:50:21.947708746 +0000 UTC m=+1426.257997559 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data") pod "rabbitmq-server-0" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d") : configmap "rabbitmq-config-data" not found Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.958759 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a38d4f6b-b91d-4bb8-9e78-54261e6a285e-kube-api-access-wmwjx" (OuterVolumeSpecName: "kube-api-access-wmwjx") pod "a38d4f6b-b91d-4bb8-9e78-54261e6a285e" (UID: "a38d4f6b-b91d-4bb8-9e78-54261e6a285e"). InnerVolumeSpecName "kube-api-access-wmwjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:17 crc kubenswrapper[4922]: I0929 22:50:17.999548 4922 scope.go:117] "RemoveContainer" containerID="3db7f086a726daa10073df95a7a2a04b8afb9e0774984976907105555406f660" Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.013696 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.015604 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.029558 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.029673 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" containerName="nova-cell0-conductor-conductor" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.032588 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.049831 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmwjx\" (UniqueName: \"kubernetes.io/projected/a38d4f6b-b91d-4bb8-9e78-54261e6a285e-kube-api-access-wmwjx\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.151981 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-combined-ca-bundle\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.152061 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-internal-tls-certs\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.152491 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-log-httpd\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.152582 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-etc-swift\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.152656 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-run-httpd\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.152801 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skjhx\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-kube-api-access-skjhx\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.152872 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-config-data\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.152907 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-public-tls-certs\") pod \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\" (UID: \"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.153497 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.154330 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.157686 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.160495 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-kube-api-access-skjhx" (OuterVolumeSpecName: "kube-api-access-skjhx") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "kube-api-access-skjhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.161962 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.244139 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.244650 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.248523 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.249041 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-config-data" (OuterVolumeSpecName: "config-data") pod "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" (UID: "a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.254087 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-nova-novncproxy-tls-certs\") pod \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.254221 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-vencrypt-tls-certs\") pod \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.254274 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmsrv\" (UniqueName: \"kubernetes.io/projected/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-kube-api-access-xmsrv\") pod \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.254315 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-config-data\") pod \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.254447 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-combined-ca-bundle\") pod \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\" (UID: \"dfe7291a-aae6-4a8f-9f46-fa4594582dfe\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255033 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255057 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255066 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255074 4922 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255082 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255090 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skjhx\" (UniqueName: \"kubernetes.io/projected/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-kube-api-access-skjhx\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255237 4922 reconciler_common.go:293] 
"Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.255249 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.265375 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-kube-api-access-xmsrv" (OuterVolumeSpecName: "kube-api-access-xmsrv") pod "dfe7291a-aae6-4a8f-9f46-fa4594582dfe" (UID: "dfe7291a-aae6-4a8f-9f46-fa4594582dfe"). InnerVolumeSpecName "kube-api-access-xmsrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.284562 4922 generic.go:334] "Generic (PLEG): container finished" podID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerID="db4826cac698823aad07ded4c68c796267271768ac310ffcee02df8874d50b96" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.284627 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b","Type":"ContainerDied","Data":"db4826cac698823aad07ded4c68c796267271768ac310ffcee02df8874d50b96"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.286370 4922 generic.go:334] "Generic (PLEG): container finished" podID="dfe7291a-aae6-4a8f-9f46-fa4594582dfe" containerID="be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.286423 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dfe7291a-aae6-4a8f-9f46-fa4594582dfe","Type":"ContainerDied","Data":"be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.286439 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dfe7291a-aae6-4a8f-9f46-fa4594582dfe","Type":"ContainerDied","Data":"4762323440fe763bdffe524f4e1afdc05e73022cd4d74ec2d49db1ff65a0e71c"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.286456 4922 scope.go:117] "RemoveContainer" containerID="be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.286564 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.310446 4922 generic.go:334] "Generic (PLEG): container finished" podID="b2c58690-3113-44b8-b2df-cbe69dbd26e3" containerID="71a056ad14953dc982ce0784ef3024d978ae9d566dcaeaee5239afa4952f28fc" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.310877 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement66ef-account-delete-nbn4v" event={"ID":"b2c58690-3113-44b8-b2df-cbe69dbd26e3","Type":"ContainerDied","Data":"71a056ad14953dc982ce0784ef3024d978ae9d566dcaeaee5239afa4952f28fc"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.316487 4922 generic.go:334] "Generic (PLEG): container finished" podID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerID="68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.316544 4922 generic.go:334] "Generic (PLEG): container finished" podID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerID="108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.316602 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" event={"ID":"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb","Type":"ContainerDied","Data":"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.316632 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" event={"ID":"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb","Type":"ContainerDied","Data":"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.316641 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" event={"ID":"a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb","Type":"ContainerDied","Data":"b0be4d9f34ba48d67b765bde4ff298d050f698e4645dde38152948a53d2aeffe"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.316721 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5fc58fc6cf-b5wq2" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.330673 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-config-data" (OuterVolumeSpecName: "config-data") pod "dfe7291a-aae6-4a8f-9f46-fa4594582dfe" (UID: "dfe7291a-aae6-4a8f-9f46-fa4594582dfe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.335579 4922 generic.go:334] "Generic (PLEG): container finished" podID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerID="db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.335656 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"aa85a019-83a6-4b71-abdb-7144be0105ae","Type":"ContainerDied","Data":"db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.340904 4922 generic.go:334] "Generic (PLEG): container finished" podID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerID="172f258b5cd1d2a007268349f6866d6235b2344ef34219c5f1c44b3260c97af7" exitCode=143 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.341727 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75fb76f858-mmqwn" event={"ID":"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3","Type":"ContainerDied","Data":"172f258b5cd1d2a007268349f6866d6235b2344ef34219c5f1c44b3260c97af7"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.344437 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "dfe7291a-aae6-4a8f-9f46-fa4594582dfe" (UID: "dfe7291a-aae6-4a8f-9f46-fa4594582dfe"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.346892 4922 generic.go:334] "Generic (PLEG): container finished" podID="a38d4f6b-b91d-4bb8-9e78-54261e6a285e" containerID="19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.346933 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance185c-account-delete-4n6ff" event={"ID":"a38d4f6b-b91d-4bb8-9e78-54261e6a285e","Type":"ContainerDied","Data":"35821848b818e802e86617c5121310d840d01aea88919928aae150a9cd91e8d8"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.346954 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance185c-account-delete-4n6ff" event={"ID":"a38d4f6b-b91d-4bb8-9e78-54261e6a285e","Type":"ContainerDied","Data":"19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.347002 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance185c-account-delete-4n6ff" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.348323 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfe7291a-aae6-4a8f-9f46-fa4594582dfe" (UID: "dfe7291a-aae6-4a8f-9f46-fa4594582dfe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.356357 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.356377 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.356541 4922 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.356608 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmsrv\" (UniqueName: \"kubernetes.io/projected/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-kube-api-access-xmsrv\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.358744 4922 generic.go:334] "Generic (PLEG): container finished" podID="cd700631-7b12-4e93-9e40-747b09623e7e" containerID="7f7d7eba3fa2e00ed7439126d30f059863a1f9a8c6d95ea44f9d8d23d062b615" exitCode=0 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.358859 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell04139-account-delete-b4vm7" event={"ID":"cd700631-7b12-4e93-9e40-747b09623e7e","Type":"ContainerDied","Data":"7f7d7eba3fa2e00ed7439126d30f059863a1f9a8c6d95ea44f9d8d23d062b615"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.358889 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell04139-account-delete-b4vm7" event={"ID":"cd700631-7b12-4e93-9e40-747b09623e7e","Type":"ContainerStarted","Data":"ff41fd46b91a69f41c32753d106dce059c18608c1816bcfe9a5029fade3d043e"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.372999 4922 generic.go:334] "Generic (PLEG): container finished" podID="d0a467a9-053f-4f41-b6b2-529130d42122" containerID="f28d240c550ebdec8934052bf4bbd350f9d62863173b23b0ac7de1265103c5eb" exitCode=1 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.373062 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell1fcf1-account-delete-vxvzv" event={"ID":"d0a467a9-053f-4f41-b6b2-529130d42122","Type":"ContainerDied","Data":"f28d240c550ebdec8934052bf4bbd350f9d62863173b23b0ac7de1265103c5eb"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.373085 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell1fcf1-account-delete-vxvzv" event={"ID":"d0a467a9-053f-4f41-b6b2-529130d42122","Type":"ContainerStarted","Data":"966e831879b73edcf3ff1e28f5bf31fba2802c5b3aaa4b15ce1c17a340c758f3"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.382681 4922 generic.go:334] "Generic (PLEG): container finished" podID="98ffad34-9721-4849-84ba-f14c518250ac" containerID="8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9" exitCode=143 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.382755 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8f78686f5-pj8pr" event={"ID":"98ffad34-9721-4849-84ba-f14c518250ac","Type":"ContainerDied","Data":"8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9"} Sep 29 22:50:18 crc 
kubenswrapper[4922]: I0929 22:50:18.388163 4922 generic.go:334] "Generic (PLEG): container finished" podID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerID="a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397" exitCode=143 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.388237 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" event={"ID":"4799fcf9-24e7-4c61-9e5e-109105ec7003","Type":"ContainerDied","Data":"a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397"} Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.409358 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "dfe7291a-aae6-4a8f-9f46-fa4594582dfe" (UID: "dfe7291a-aae6-4a8f-9f46-fa4594582dfe"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.447667 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f689cf2-292c-47a9-936d-57954d187f5d" path="/var/lib/kubelet/pods/0f689cf2-292c-47a9-936d-57954d187f5d/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.448304 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fdfa237-2999-45c5-a008-0e99fa6f479f" path="/var/lib/kubelet/pods/1fdfa237-2999-45c5-a008-0e99fa6f479f/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.449917 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25c9b137-8a15-477d-b87a-b4480c856551" path="/var/lib/kubelet/pods/25c9b137-8a15-477d-b87a-b4480c856551/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.450979 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a1d01b5-f345-4aa5-88b0-a64c534f661c" path="/var/lib/kubelet/pods/3a1d01b5-f345-4aa5-88b0-a64c534f661c/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.451576 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" path="/var/lib/kubelet/pods/4a7323e3-8b0e-4f74-b0f4-73c5874fe361/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.452071 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7253e3fb-15f2-40c8-886e-98730dd4279b" path="/var/lib/kubelet/pods/7253e3fb-15f2-40c8-886e-98730dd4279b/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.452500 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="755c37dc-48a9-4941-8410-1832fb4a78e8" path="/var/lib/kubelet/pods/755c37dc-48a9-4941-8410-1832fb4a78e8/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.455077 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94230f56-1036-4c04-8b74-b7138466df0c" path="/var/lib/kubelet/pods/94230f56-1036-4c04-8b74-b7138466df0c/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.455569 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" path="/var/lib/kubelet/pods/a41c5e41-6db6-44dc-989d-d7a8ed8ae091/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.460293 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bce38540-6796-48b5-82e7-aad30cf98841" path="/var/lib/kubelet/pods/bce38540-6796-48b5-82e7-aad30cf98841/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: 
I0929 22:50:18.461073 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3f528ca-b48a-4e5d-b801-620778a59ec6" path="/var/lib/kubelet/pods/d3f528ca-b48a-4e5d-b801-620778a59ec6/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.461511 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fff35789-9cfe-47f4-8477-ddaf5caf85fc" path="/var/lib/kubelet/pods/fff35789-9cfe-47f4-8477-ddaf5caf85fc/volumes" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.463273 4922 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/dfe7291a-aae6-4a8f-9f46-fa4594582dfe-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.508316 4922 scope.go:117] "RemoveContainer" containerID="be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd" Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.509189 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd\": container with ID starting with be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd not found: ID does not exist" containerID="be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.509219 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd"} err="failed to get container status \"be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd\": rpc error: code = NotFound desc = could not find container \"be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd\": container with ID starting with be97f7520dddf7b14a9e2706f13ea0c6d554c3e7114c07f45fded3cd2b8c01fd not found: ID does not exist" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.509239 4922 scope.go:117] "RemoveContainer" containerID="68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.525643 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.540612 4922 scope.go:117] "RemoveContainer" containerID="108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.541496 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5fc58fc6cf-b5wq2"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.548565 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-5fc58fc6cf-b5wq2"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.554090 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance185c-account-delete-4n6ff"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.560283 4922 scope.go:117] "RemoveContainer" containerID="68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0" Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.560765 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0\": container with ID starting with 68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0 not found: ID does not exist" containerID="68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.560795 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0"} err="failed to get container status \"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0\": rpc error: code = NotFound desc = could not find container \"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0\": container with ID starting with 68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0 not found: ID does not exist" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.560814 4922 scope.go:117] "RemoveContainer" containerID="108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702" Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.560983 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702\": container with ID starting with 108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702 not found: ID does not exist" containerID="108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.561001 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702"} err="failed to get container status \"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702\": rpc error: code = NotFound desc = could not find container \"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702\": container with ID starting with 108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702 not found: ID does not exist" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.561012 4922 scope.go:117] "RemoveContainer" containerID="68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.561159 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0"} err="failed to get container status \"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0\": rpc error: code = NotFound desc = could not find container \"68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0\": container with ID starting with 68fdcdc12617b0d125f6bfed432216e0c510797dbe72b49e4961fd8ba6cbd4f0 not found: ID does not exist" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.561174 4922 scope.go:117] "RemoveContainer" containerID="108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.561319 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702"} err="failed to get container status \"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702\": rpc error: code = NotFound desc = could not find container \"108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702\": container with ID starting with 108a16f4871be29ac5cec52e91f229f1385810778830b84be709742145718702 not found: ID does not exist" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.561335 4922 scope.go:117] "RemoveContainer" containerID="19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.563808 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-default\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.563885 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-secrets\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.564038 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfqvh\" (UniqueName: \"kubernetes.io/projected/aa85a019-83a6-4b71-abdb-7144be0105ae-kube-api-access-nfqvh\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.564062 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-generated\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.565019 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.565522 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-kolla-config\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.565592 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.565780 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-operator-scripts\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.565876 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-galera-tls-certs\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.565948 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-combined-ca-bundle\") pod \"aa85a019-83a6-4b71-abdb-7144be0105ae\" (UID: \"aa85a019-83a6-4b71-abdb-7144be0105ae\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.566361 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance185c-account-delete-4n6ff"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.566996 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.567507 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.568682 4922 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-operator-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.568703 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-generated\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.568716 4922 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-kolla-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.570454 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.574242 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa85a019-83a6-4b71-abdb-7144be0105ae-kube-api-access-nfqvh" (OuterVolumeSpecName: "kube-api-access-nfqvh") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "kube-api-access-nfqvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.577807 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-secrets" (OuterVolumeSpecName: "secrets") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.583190 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "local-storage03-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.588850 4922 scope.go:117] "RemoveContainer" containerID="19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d" Sep 29 22:50:18 crc kubenswrapper[4922]: E0929 22:50:18.591760 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d\": container with ID starting with 19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d not found: ID does not exist" containerID="19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.591790 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d"} err="failed to get container status \"19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d\": rpc error: code = NotFound desc = could not find container \"19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d\": container with ID starting with 19db1ccda186ddede98a84588f58a10d7d2e9a57a9eab130fb8c24a88e497a6d not found: ID does not exist" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.620059 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.640494 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.649936 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.670741 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.670762 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/aa85a019-83a6-4b71-abdb-7144be0105ae-config-data-default\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.670770 4922 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.670783 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfqvh\" (UniqueName: \"kubernetes.io/projected/aa85a019-83a6-4b71-abdb-7144be0105ae-kube-api-access-nfqvh\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.670812 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.697212 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "aa85a019-83a6-4b71-abdb-7144be0105ae" (UID: "aa85a019-83a6-4b71-abdb-7144be0105ae"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.702268 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.746199 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement66ef-account-delete-nbn4v" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.771825 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7q6q\" (UniqueName: \"kubernetes.io/projected/b2c58690-3113-44b8-b2df-cbe69dbd26e3-kube-api-access-b7q6q\") pod \"b2c58690-3113-44b8-b2df-cbe69dbd26e3\" (UID: \"b2c58690-3113-44b8-b2df-cbe69dbd26e3\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.772281 4922 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa85a019-83a6-4b71-abdb-7144be0105ae-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.772295 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.777162 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2c58690-3113-44b8-b2df-cbe69dbd26e3-kube-api-access-b7q6q" (OuterVolumeSpecName: "kube-api-access-b7q6q") pod "b2c58690-3113-44b8-b2df-cbe69dbd26e3" (UID: "b2c58690-3113-44b8-b2df-cbe69dbd26e3"). InnerVolumeSpecName "kube-api-access-b7q6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.808948 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron818d-account-delete-xtx9k" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.831180 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell1fcf1-account-delete-vxvzv" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.873855 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vwhq\" (UniqueName: \"kubernetes.io/projected/0082d9b7-4b81-47ca-8ba7-61429fdcc678-kube-api-access-5vwhq\") pod \"0082d9b7-4b81-47ca-8ba7-61429fdcc678\" (UID: \"0082d9b7-4b81-47ca-8ba7-61429fdcc678\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.874165 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27gf5\" (UniqueName: \"kubernetes.io/projected/d0a467a9-053f-4f41-b6b2-529130d42122-kube-api-access-27gf5\") pod \"d0a467a9-053f-4f41-b6b2-529130d42122\" (UID: \"d0a467a9-053f-4f41-b6b2-529130d42122\") " Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.874595 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7q6q\" (UniqueName: \"kubernetes.io/projected/b2c58690-3113-44b8-b2df-cbe69dbd26e3-kube-api-access-b7q6q\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.877547 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0082d9b7-4b81-47ca-8ba7-61429fdcc678-kube-api-access-5vwhq" (OuterVolumeSpecName: "kube-api-access-5vwhq") pod "0082d9b7-4b81-47ca-8ba7-61429fdcc678" (UID: "0082d9b7-4b81-47ca-8ba7-61429fdcc678"). InnerVolumeSpecName "kube-api-access-5vwhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.878511 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0a467a9-053f-4f41-b6b2-529130d42122-kube-api-access-27gf5" (OuterVolumeSpecName: "kube-api-access-27gf5") pod "d0a467a9-053f-4f41-b6b2-529130d42122" (UID: "d0a467a9-053f-4f41-b6b2-529130d42122"). InnerVolumeSpecName "kube-api-access-27gf5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.948986 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.949307 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-central-agent" containerID="cri-o://13fd77bc452c12f556dd75f45bf37781a1403c31edf646ab5a140812ff690364" gracePeriod=30 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.949760 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="proxy-httpd" containerID="cri-o://cae850c08219cac4ce375335d50b6de2b3c7fb4df5328a2868d0238ce2244141" gracePeriod=30 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.949802 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="sg-core" containerID="cri-o://ce05b6489484a12ed071a747de16f8ec141d68489e436390d82db8647d849b69" gracePeriod=30 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.949837 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-notification-agent" containerID="cri-o://bcdb4f9a717ab87bcdb1ad12cb92702bad850a19ba4db942f610381ad13eb2b3" gracePeriod=30 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.958421 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.959447 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="1c5eef11-d4e0-43cd-b305-c427f85d173a" containerName="kube-state-metrics" containerID="cri-o://9d81b17eb6b803729c871b1ba518d9db5c6074ebf6d349cf4471bdfb5a4bac22" gracePeriod=30 Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.981021 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27gf5\" (UniqueName: \"kubernetes.io/projected/d0a467a9-053f-4f41-b6b2-529130d42122-kube-api-access-27gf5\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:18 crc kubenswrapper[4922]: I0929 22:50:18.981049 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vwhq\" (UniqueName: \"kubernetes.io/projected/0082d9b7-4b81-47ca-8ba7-61429fdcc678-kube-api-access-5vwhq\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.048510 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.048810 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="1654e799-40ef-413a-8324-bb5b4f7a8f17" containerName="memcached" containerID="cri-o://e253e117e33e1551c4b4d444e0f50023636461d456d2027cdac35eb3aeb6c536" gracePeriod=30 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.111970 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-fktvf"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.131111 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-59s2p"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.144187 4922 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-fktvf"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.163122 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-59s2p"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.178833 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-66cbdc5bdb-sfmk4"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.179098 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-66cbdc5bdb-sfmk4" podUID="19fc05a2-d210-4c05-8341-eafdbcc40dc1" containerName="keystone-api" containerID="cri-o://f71b4f675a06896c34fd4e0be64b4fe734c8451e3903876095b9c46d095cd09a" gracePeriod=30 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.193138 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.202485 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-r5zbt"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.220166 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-r5zbt"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.237443 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron818d-account-delete-xtx9k"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.244985 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-818d-account-create-rgcn6"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.250021 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-818d-account-create-rgcn6"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.280698 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron818d-account-delete-xtx9k"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.293130 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-wfl9h"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.326820 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-dc06-account-create-ls6hh"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.332881 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-wfl9h"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.338665 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-dc06-account-create-ls6hh"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.411926 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerName="galera" containerID="cri-o://c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa" gracePeriod=30 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.414464 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e76daf748777017a5acd06b97290f6a97953429821f9c26e34b0a82de51481fc" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.414544 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron818d-account-delete-xtx9k" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.430360 4922 generic.go:334] "Generic (PLEG): container finished" podID="447099dc-1eea-4510-8b94-faa6899f6b06" containerID="4bade6528d890c812a2f6c25a51fb063416552fbc042c3c5effa4226f2415177" exitCode=0 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.430440 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"447099dc-1eea-4510-8b94-faa6899f6b06","Type":"ContainerDied","Data":"4bade6528d890c812a2f6c25a51fb063416552fbc042c3c5effa4226f2415177"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.436107 4922 generic.go:334] "Generic (PLEG): container finished" podID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerID="70b0fa4952e40bc0e0d7fd5d77a22557f9abd49c9cf9a6a2477a7399f2433c1f" exitCode=0 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.436187 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b","Type":"ContainerDied","Data":"70b0fa4952e40bc0e0d7fd5d77a22557f9abd49c9cf9a6a2477a7399f2433c1f"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.439783 4922 generic.go:334] "Generic (PLEG): container finished" podID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerID="ce05b6489484a12ed071a747de16f8ec141d68489e436390d82db8647d849b69" exitCode=2 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.439827 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerDied","Data":"ce05b6489484a12ed071a747de16f8ec141d68489e436390d82db8647d849b69"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.444279 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"aa85a019-83a6-4b71-abdb-7144be0105ae","Type":"ContainerDied","Data":"9016f9e19bef00c2a06f46e5f6cb599045903a16c5e1959454384154153a7943"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.444337 4922 scope.go:117] "RemoveContainer" containerID="db4919298398abfdfcd2f3e6d56abfb1dbc96dc63483648cf678375784ec8d59" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.444541 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.453164 4922 generic.go:334] "Generic (PLEG): container finished" podID="5b8254ca-83c1-49a8-b453-107577b54f01" containerID="e9c29c8849b943f694f98cfb6baeef978eb44c7f2f718c45a1d5e25db0e098e6" exitCode=0 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.453281 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5b8254ca-83c1-49a8-b453-107577b54f01","Type":"ContainerDied","Data":"e9c29c8849b943f694f98cfb6baeef978eb44c7f2f718c45a1d5e25db0e098e6"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.463495 4922 generic.go:334] "Generic (PLEG): container finished" podID="1c5eef11-d4e0-43cd-b305-c427f85d173a" containerID="9d81b17eb6b803729c871b1ba518d9db5c6074ebf6d349cf4471bdfb5a4bac22" exitCode=2 Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.463635 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1c5eef11-d4e0-43cd-b305-c427f85d173a","Type":"ContainerDied","Data":"9d81b17eb6b803729c871b1ba518d9db5c6074ebf6d349cf4471bdfb5a4bac22"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.479496 4922 scope.go:117] "RemoveContainer" containerID="d4fadda570345bb9aa23bb32aebb1fcd7e4a48ba19bc93e554b2daf36cc8b8d3" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.479759 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement66ef-account-delete-nbn4v" event={"ID":"b2c58690-3113-44b8-b2df-cbe69dbd26e3","Type":"ContainerDied","Data":"106615ede107b186d81c3f18b4b721a7ab45623ea532d1f0aeb6c2bb1bb4b773"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.479894 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement66ef-account-delete-nbn4v" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.498886 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.501949 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.508482 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell1fcf1-account-delete-vxvzv" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.511921 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell1fcf1-account-delete-vxvzv" event={"ID":"d0a467a9-053f-4f41-b6b2-529130d42122","Type":"ContainerDied","Data":"966e831879b73edcf3ff1e28f5bf31fba2802c5b3aaa4b15ce1c17a340c758f3"} Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.557387 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.165:8776/healthcheck\": read tcp 10.217.0.2:49426->10.217.0.165:8776: read: connection reset by peer" Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.649502 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.650371 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.650647 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.650669 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.651010 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.652453 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.658738 4922 log.go:32] "ExecSync cmd from 
runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:19 crc kubenswrapper[4922]: E0929 22:50:19.658807 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.769556 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.771833 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell1fcf1-account-delete-vxvzv"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.773225 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.779741 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell1fcf1-account-delete-vxvzv"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.790704 4922 scope.go:117] "RemoveContainer" containerID="71a056ad14953dc982ce0784ef3024d978ae9d566dcaeaee5239afa4952f28fc" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.790967 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement66ef-account-delete-nbn4v"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.798114 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement66ef-account-delete-nbn4v"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.812420 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817293 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-combined-ca-bundle\") pod \"1c5eef11-d4e0-43cd-b305-c427f85d173a\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817339 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-combined-ca-bundle\") pod \"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817382 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmlwv\" (UniqueName: \"kubernetes.io/projected/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-api-access-gmlwv\") pod \"1c5eef11-d4e0-43cd-b305-c427f85d173a\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817446 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-logs\") pod \"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817485 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-config\") pod \"1c5eef11-d4e0-43cd-b305-c427f85d173a\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817503 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-config-data\") pod \"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817526 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-certs\") pod \"1c5eef11-d4e0-43cd-b305-c427f85d173a\" (UID: \"1c5eef11-d4e0-43cd-b305-c427f85d173a\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817582 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zd7lp\" (UniqueName: \"kubernetes.io/projected/447099dc-1eea-4510-8b94-faa6899f6b06-kube-api-access-zd7lp\") pod \"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817635 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-httpd-run\") pod \"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817656 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod 
\"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817678 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-internal-tls-certs\") pod \"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.817696 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-scripts\") pod \"447099dc-1eea-4510-8b94-faa6899f6b06\" (UID: \"447099dc-1eea-4510-8b94-faa6899f6b06\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.823978 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.824937 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-logs" (OuterVolumeSpecName: "logs") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.825056 4922 scope.go:117] "RemoveContainer" containerID="f28d240c550ebdec8934052bf4bbd350f9d62863173b23b0ac7de1265103c5eb" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.829863 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.831545 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-scripts" (OuterVolumeSpecName: "scripts") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.832827 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/447099dc-1eea-4510-8b94-faa6899f6b06-kube-api-access-zd7lp" (OuterVolumeSpecName: "kube-api-access-zd7lp") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "kube-api-access-zd7lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.835187 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-api-access-gmlwv" (OuterVolumeSpecName: "kube-api-access-gmlwv") pod "1c5eef11-d4e0-43cd-b305-c427f85d173a" (UID: "1c5eef11-d4e0-43cd-b305-c427f85d173a"). InnerVolumeSpecName "kube-api-access-gmlwv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.869547 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.879850 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.891675 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "1c5eef11-d4e0-43cd-b305-c427f85d173a" (UID: "1c5eef11-d4e0-43cd-b305-c427f85d173a"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.919890 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-rjc6l"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.919919 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-scripts\") pod \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.919964 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-logs\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.919984 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp78t\" (UniqueName: \"kubernetes.io/projected/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-kube-api-access-vp78t\") pod \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920014 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data-custom\") pod \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920035 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg82b\" (UniqueName: \"kubernetes.io/projected/5b8254ca-83c1-49a8-b453-107577b54f01-kube-api-access-dg82b\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920057 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data\") pod \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920125 4922 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-combined-ca-bundle\") pod \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920141 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-public-tls-certs\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920162 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920187 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-config-data\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920205 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-etc-machine-id\") pod \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\" (UID: \"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920241 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-httpd-run\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920334 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-combined-ca-bundle\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920462 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-scripts\") pod \"5b8254ca-83c1-49a8-b453-107577b54f01\" (UID: \"5b8254ca-83c1-49a8-b453-107577b54f01\") " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920554 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-logs" (OuterVolumeSpecName: "logs") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920818 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920840 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920849 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920859 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920872 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmlwv\" (UniqueName: \"kubernetes.io/projected/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-api-access-gmlwv\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920880 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447099dc-1eea-4510-8b94-faa6899f6b06-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920888 4922 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920900 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.920908 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zd7lp\" (UniqueName: \"kubernetes.io/projected/447099dc-1eea-4510-8b94-faa6899f6b06-kube-api-access-zd7lp\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.921242 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-config-data" (OuterVolumeSpecName: "config-data") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.923090 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.923301 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" (UID: "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.925692 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "1c5eef11-d4e0-43cd-b305-c427f85d173a" (UID: "1c5eef11-d4e0-43cd-b305-c427f85d173a"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.934808 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.936175 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-scripts" (OuterVolumeSpecName: "scripts") pod "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" (UID: "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.942626 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" (UID: "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.942800 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-rjc6l"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.943717 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "447099dc-1eea-4510-8b94-faa6899f6b06" (UID: "447099dc-1eea-4510-8b94-faa6899f6b06"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.958226 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-scripts" (OuterVolumeSpecName: "scripts") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.958302 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b8254ca-83c1-49a8-b453-107577b54f01-kube-api-access-dg82b" (OuterVolumeSpecName: "kube-api-access-dg82b") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "kube-api-access-dg82b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.959848 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-kube-api-access-vp78t" (OuterVolumeSpecName: "kube-api-access-vp78t") pod "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" (UID: "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b"). InnerVolumeSpecName "kube-api-access-vp78t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.960974 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-4139-account-create-lssc6"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.965658 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell04139-account-delete-b4vm7"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.968079 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.971169 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell04139-account-delete-b4vm7" Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.971623 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-4139-account-create-lssc6"] Sep 29 22:50:19 crc kubenswrapper[4922]: I0929 22:50:19.982782 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.011630 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-nrr6k" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" probeResult="failure" output=< Sep 29 22:50:20 crc kubenswrapper[4922]: ERROR - Failed to get connection status from ovn-controller, ovn-appctl exit status: 0 Sep 29 22:50:20 crc kubenswrapper[4922]: > Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.019596 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c5eef11-d4e0-43cd-b305-c427f85d173a" (UID: "1c5eef11-d4e0-43cd-b305-c427f85d173a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.022652 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cm7m\" (UniqueName: \"kubernetes.io/projected/cd700631-7b12-4e93-9e40-747b09623e7e-kube-api-access-7cm7m\") pod \"cd700631-7b12-4e93-9e40-747b09623e7e\" (UID: \"cd700631-7b12-4e93-9e40-747b09623e7e\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023118 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023139 4922 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023151 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023160 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp78t\" (UniqueName: \"kubernetes.io/projected/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-kube-api-access-vp78t\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023170 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023181 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg82b\" (UniqueName: \"kubernetes.io/projected/5b8254ca-83c1-49a8-b453-107577b54f01-kube-api-access-dg82b\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023190 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023197 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/447099dc-1eea-4510-8b94-faa6899f6b06-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023221 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023233 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023243 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5b8254ca-83c1-49a8-b453-107577b54f01-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023253 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/1c5eef11-d4e0-43cd-b305-c427f85d173a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023261 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.023268 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.030795 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd700631-7b12-4e93-9e40-747b09623e7e-kube-api-access-7cm7m" (OuterVolumeSpecName: "kube-api-access-7cm7m") pod "cd700631-7b12-4e93-9e40-747b09623e7e" (UID: "cd700631-7b12-4e93-9e40-747b09623e7e"). InnerVolumeSpecName "kube-api-access-7cm7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.031409 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" (UID: "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.044050 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.044230 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:52692->10.217.0.198:8775: read: connection reset by peer" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.044624 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.198:8775/\": read tcp 10.217.0.2:52696->10.217.0.198:8775: read: connection reset by peer" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.054534 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.090887 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-config-data" (OuterVolumeSpecName: "config-data") pod "5b8254ca-83c1-49a8-b453-107577b54f01" (UID: "5b8254ca-83c1-49a8-b453-107577b54f01"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.096229 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data" (OuterVolumeSpecName: "config-data") pod "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" (UID: "b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.125010 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cm7m\" (UniqueName: \"kubernetes.io/projected/cd700631-7b12-4e93-9e40-747b09623e7e-kube-api-access-7cm7m\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.125181 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.125259 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.125313 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.125377 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.125448 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b8254ca-83c1-49a8-b453-107577b54f01-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.136238 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.147813 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226117 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-combined-ca-bundle\") pod \"51f81c86-8f6d-4506-a940-5015032df5bd\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226152 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-scripts\") pod \"51f81c86-8f6d-4506-a940-5015032df5bd\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226182 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-config-data\") pod \"51f81c86-8f6d-4506-a940-5015032df5bd\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226218 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr2t5\" (UniqueName: \"kubernetes.io/projected/51f81c86-8f6d-4506-a940-5015032df5bd-kube-api-access-wr2t5\") pod \"51f81c86-8f6d-4506-a940-5015032df5bd\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226248 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data-custom\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226268 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-internal-tls-certs\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226303 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51f81c86-8f6d-4506-a940-5015032df5bd-logs\") pod \"51f81c86-8f6d-4506-a940-5015032df5bd\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226341 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-scripts\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226363 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-internal-tls-certs\") pod \"51f81c86-8f6d-4506-a940-5015032df5bd\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226443 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8788e59c-0cd3-43c5-8591-d452f9cb083a-etc-machine-id\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: 
\"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226472 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-combined-ca-bundle\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226492 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226506 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4frms\" (UniqueName: \"kubernetes.io/projected/8788e59c-0cd3-43c5-8591-d452f9cb083a-kube-api-access-4frms\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226522 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-public-tls-certs\") pod \"51f81c86-8f6d-4506-a940-5015032df5bd\" (UID: \"51f81c86-8f6d-4506-a940-5015032df5bd\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-public-tls-certs\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.226576 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8788e59c-0cd3-43c5-8591-d452f9cb083a-logs\") pod \"8788e59c-0cd3-43c5-8591-d452f9cb083a\" (UID: \"8788e59c-0cd3-43c5-8591-d452f9cb083a\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.227326 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8788e59c-0cd3-43c5-8591-d452f9cb083a-logs" (OuterVolumeSpecName: "logs") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.228934 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51f81c86-8f6d-4506-a940-5015032df5bd-logs" (OuterVolumeSpecName: "logs") pod "51f81c86-8f6d-4506-a940-5015032df5bd" (UID: "51f81c86-8f6d-4506-a940-5015032df5bd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.229494 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8788e59c-0cd3-43c5-8591-d452f9cb083a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.231771 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.235878 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-scripts" (OuterVolumeSpecName: "scripts") pod "51f81c86-8f6d-4506-a940-5015032df5bd" (UID: "51f81c86-8f6d-4506-a940-5015032df5bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.236635 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-scripts" (OuterVolumeSpecName: "scripts") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.252737 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51f81c86-8f6d-4506-a940-5015032df5bd-kube-api-access-wr2t5" (OuterVolumeSpecName: "kube-api-access-wr2t5") pod "51f81c86-8f6d-4506-a940-5015032df5bd" (UID: "51f81c86-8f6d-4506-a940-5015032df5bd"). InnerVolumeSpecName "kube-api-access-wr2t5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.261030 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8788e59c-0cd3-43c5-8591-d452f9cb083a-kube-api-access-4frms" (OuterVolumeSpecName: "kube-api-access-4frms") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "kube-api-access-4frms". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.295539 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.298904 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.304848 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-config-data" (OuterVolumeSpecName: "config-data") pod "51f81c86-8f6d-4506-a940-5015032df5bd" (UID: "51f81c86-8f6d-4506-a940-5015032df5bd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.309439 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.316751 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51f81c86-8f6d-4506-a940-5015032df5bd" (UID: "51f81c86-8f6d-4506-a940-5015032df5bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.321553 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.324016 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.325485 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.325562 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="bf1c4a85-458f-4412-ae77-af6d87370b62" containerName="nova-cell1-conductor-conductor" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328579 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8788e59c-0cd3-43c5-8591-d452f9cb083a-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328610 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328623 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328634 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 
22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328649 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr2t5\" (UniqueName: \"kubernetes.io/projected/51f81c86-8f6d-4506-a940-5015032df5bd-kube-api-access-wr2t5\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328663 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328674 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328688 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51f81c86-8f6d-4506-a940-5015032df5bd-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328698 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328708 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8788e59c-0cd3-43c5-8591-d452f9cb083a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328719 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328730 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4frms\" (UniqueName: \"kubernetes.io/projected/8788e59c-0cd3-43c5-8591-d452f9cb083a-kube-api-access-4frms\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.328741 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.356177 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data" (OuterVolumeSpecName: "config-data") pod "8788e59c-0cd3-43c5-8591-d452f9cb083a" (UID: "8788e59c-0cd3-43c5-8591-d452f9cb083a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.368545 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "51f81c86-8f6d-4506-a940-5015032df5bd" (UID: "51f81c86-8f6d-4506-a940-5015032df5bd"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.384283 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "51f81c86-8f6d-4506-a940-5015032df5bd" (UID: "51f81c86-8f6d-4506-a940-5015032df5bd"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.429715 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8788e59c-0cd3-43c5-8591-d452f9cb083a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.429741 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.429751 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51f81c86-8f6d-4506-a940-5015032df5bd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.434127 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0082d9b7-4b81-47ca-8ba7-61429fdcc678" path="/var/lib/kubelet/pods/0082d9b7-4b81-47ca-8ba7-61429fdcc678/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.434641 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="441161de-0f1a-47e8-8adb-5d6a74989cb4" path="/var/lib/kubelet/pods/441161de-0f1a-47e8-8adb-5d6a74989cb4/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.435312 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71a94cd2-8bcf-4026-8668-1c55d6e1a8a7" path="/var/lib/kubelet/pods/71a94cd2-8bcf-4026-8668-1c55d6e1a8a7/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.435767 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="818de8af-e1df-4003-8a42-c0f067a7a0cc" path="/var/lib/kubelet/pods/818de8af-e1df-4003-8a42-c0f067a7a0cc/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.436781 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91ff7529-975a-44c8-a1dd-b7fd1f8708cf" path="/var/lib/kubelet/pods/91ff7529-975a-44c8-a1dd-b7fd1f8708cf/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.437201 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a38d4f6b-b91d-4bb8-9e78-54261e6a285e" path="/var/lib/kubelet/pods/a38d4f6b-b91d-4bb8-9e78-54261e6a285e/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.437687 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" path="/var/lib/kubelet/pods/a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.438814 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" path="/var/lib/kubelet/pods/aa85a019-83a6-4b71-abdb-7144be0105ae/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.439297 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2c58690-3113-44b8-b2df-cbe69dbd26e3" path="/var/lib/kubelet/pods/b2c58690-3113-44b8-b2df-cbe69dbd26e3/volumes" Sep 29 
22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.439732 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d094da53-d934-420c-9ea8-501e62d38d30" path="/var/lib/kubelet/pods/d094da53-d934-420c-9ea8-501e62d38d30/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.440568 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0a467a9-053f-4f41-b6b2-529130d42122" path="/var/lib/kubelet/pods/d0a467a9-053f-4f41-b6b2-529130d42122/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.440987 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df501d92-801d-4caa-8d1e-da48b45182cf" path="/var/lib/kubelet/pods/df501d92-801d-4caa-8d1e-da48b45182cf/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.441461 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfe7291a-aae6-4a8f-9f46-fa4594582dfe" path="/var/lib/kubelet/pods/dfe7291a-aae6-4a8f-9f46-fa4594582dfe/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.441873 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e02eaa01-5408-4268-b8ac-d7bd7917d1c6" path="/var/lib/kubelet/pods/e02eaa01-5408-4268-b8ac-d7bd7917d1c6/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.442768 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9ef1309-66cb-4726-b646-941bd87b27b1" path="/var/lib/kubelet/pods/f9ef1309-66cb-4726-b646-941bd87b27b1/volumes" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.447980 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75fb76f858-mmqwn" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.154:9311/healthcheck\": dial tcp 10.217.0.154:9311: connect: connection refused" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.448113 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-75fb76f858-mmqwn" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.154:9311/healthcheck\": dial tcp 10.217.0.154:9311: connect: connection refused" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.536315 4922 generic.go:334] "Generic (PLEG): container finished" podID="51f81c86-8f6d-4506-a940-5015032df5bd" containerID="114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4" exitCode=0 Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.536384 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-556f68d56-jxmlq" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.536366 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-556f68d56-jxmlq" event={"ID":"51f81c86-8f6d-4506-a940-5015032df5bd","Type":"ContainerDied","Data":"114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.536450 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-556f68d56-jxmlq" event={"ID":"51f81c86-8f6d-4506-a940-5015032df5bd","Type":"ContainerDied","Data":"720c3aa536538d9da9cbed2c46c678d8960341138b7cb603422bb996d821a8ba"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.536473 4922 scope.go:117] "RemoveContainer" containerID="114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.545417 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"5b8254ca-83c1-49a8-b453-107577b54f01","Type":"ContainerDied","Data":"4bc42216ffedb37bd9c073af3cb65a977c43773e104a4af6d8bfe2f3f51e9e1a"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.545509 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.573505 4922 generic.go:334] "Generic (PLEG): container finished" podID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerID="cae850c08219cac4ce375335d50b6de2b3c7fb4df5328a2868d0238ce2244141" exitCode=0 Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.573534 4922 generic.go:334] "Generic (PLEG): container finished" podID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerID="13fd77bc452c12f556dd75f45bf37781a1403c31edf646ab5a140812ff690364" exitCode=0 Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.573537 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerDied","Data":"cae850c08219cac4ce375335d50b6de2b3c7fb4df5328a2868d0238ce2244141"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.573568 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerDied","Data":"13fd77bc452c12f556dd75f45bf37781a1403c31edf646ab5a140812ff690364"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.623845 4922 generic.go:334] "Generic (PLEG): container finished" podID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerID="e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1" exitCode=0 Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.624035 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8788e59c-0cd3-43c5-8591-d452f9cb083a","Type":"ContainerDied","Data":"e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.624070 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8788e59c-0cd3-43c5-8591-d452f9cb083a","Type":"ContainerDied","Data":"2265d26ef85dca95044cf74f9eb57eb6ea2e6da0b1454dc245a83a2f1f18cec9"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.624169 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.633029 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b","Type":"ContainerDied","Data":"e51efd7d0a777295c574ae8ad22bfded8af0d1c2e9f8e4ff1fde65b287829a3d"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.633174 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.644366 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell04139-account-delete-b4vm7" event={"ID":"cd700631-7b12-4e93-9e40-747b09623e7e","Type":"ContainerDied","Data":"ff41fd46b91a69f41c32753d106dce059c18608c1816bcfe9a5029fade3d043e"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.644410 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff41fd46b91a69f41c32753d106dce059c18608c1816bcfe9a5029fade3d043e" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.644467 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell04139-account-delete-b4vm7" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.651997 4922 generic.go:334] "Generic (PLEG): container finished" podID="8026992e-7dd1-42d9-b362-82febc75c072" containerID="3480a1ee34671883eedf597274a6f64fbdd34cfd4e44a319ae58d788dace7d36" exitCode=0 Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.652147 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8026992e-7dd1-42d9-b362-82febc75c072","Type":"ContainerDied","Data":"3480a1ee34671883eedf597274a6f64fbdd34cfd4e44a319ae58d788dace7d36"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.667652 4922 scope.go:117] "RemoveContainer" containerID="b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.667726 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.668701 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1c5eef11-d4e0-43cd-b305-c427f85d173a","Type":"ContainerDied","Data":"d454c4f40de7202a0be0dc9b199b91750d6b7978940474780a460ab06d38d087"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.668800 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.674091 4922 generic.go:334] "Generic (PLEG): container finished" podID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerID="6900ea52bd3a44fd3677b19a3a356664f86ce5fb715b38eadb63dcaaa0a2a2c5" exitCode=0 Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.674131 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75fb76f858-mmqwn" event={"ID":"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3","Type":"ContainerDied","Data":"6900ea52bd3a44fd3677b19a3a356664f86ce5fb715b38eadb63dcaaa0a2a2c5"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.677071 4922 generic.go:334] "Generic (PLEG): container finished" podID="1654e799-40ef-413a-8324-bb5b4f7a8f17" containerID="e253e117e33e1551c4b4d444e0f50023636461d456d2027cdac35eb3aeb6c536" exitCode=0 Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.677107 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1654e799-40ef-413a-8324-bb5b4f7a8f17","Type":"ContainerDied","Data":"e253e117e33e1551c4b4d444e0f50023636461d456d2027cdac35eb3aeb6c536"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.679457 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"447099dc-1eea-4510-8b94-faa6899f6b06","Type":"ContainerDied","Data":"497fb562b8f648f338fccc14b87906721944a700859fef0001dd16c335a5cfef"} Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.679523 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.679785 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.697698 4922 scope.go:117] "RemoveContainer" containerID="114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4" Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.698321 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4\": container with ID starting with 114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4 not found: ID does not exist" containerID="114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.698424 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4"} err="failed to get container status \"114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4\": rpc error: code = NotFound desc = could not find container \"114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4\": container with ID starting with 114ab2029c84965181c83f09fd32c13965cee1b5b5fb26e4ba953477c9c486b4 not found: ID does not exist" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.698451 4922 scope.go:117] "RemoveContainer" containerID="b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7" Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.698792 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7\": container with ID starting with b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7 not found: ID does not exist" containerID="b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.698815 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7"} err="failed to get container status \"b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7\": rpc error: code = NotFound desc = could not find container \"b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7\": container with ID starting with b0d4712c291b1947715ae1b603683a00d8640e8dfa7db01fbdde044e6993b4a7 not found: ID does not exist" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.698829 4922 scope.go:117] "RemoveContainer" containerID="e9c29c8849b943f694f98cfb6baeef978eb44c7f2f718c45a1d5e25db0e098e6" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.707916 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.734352 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.735723 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.739503 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.740986 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.741024 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="ovn-northd" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742377 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-nova-metadata-tls-certs\") pod \"8026992e-7dd1-42d9-b362-82febc75c072\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742483 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64df5\" (UniqueName: \"kubernetes.io/projected/1654e799-40ef-413a-8324-bb5b4f7a8f17-kube-api-access-64df5\") pod \"1654e799-40ef-413a-8324-bb5b4f7a8f17\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742510 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-internal-tls-certs\") pod \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742579 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf7d4\" (UniqueName: \"kubernetes.io/projected/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-kube-api-access-lf7d4\") pod \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742600 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-config-data\") pod \"8026992e-7dd1-42d9-b362-82febc75c072\" (UID: 
\"8026992e-7dd1-42d9-b362-82febc75c072\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742632 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-logs\") pod \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742660 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-kolla-config\") pod \"1654e799-40ef-413a-8324-bb5b4f7a8f17\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742689 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-config-data\") pod \"1654e799-40ef-413a-8324-bb5b4f7a8f17\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742728 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-memcached-tls-certs\") pod \"1654e799-40ef-413a-8324-bb5b4f7a8f17\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742801 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-combined-ca-bundle\") pod \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742837 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-combined-ca-bundle\") pod \"1654e799-40ef-413a-8324-bb5b4f7a8f17\" (UID: \"1654e799-40ef-413a-8324-bb5b4f7a8f17\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742857 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data\") pod \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742886 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data-custom\") pod \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742910 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-combined-ca-bundle\") pod \"8026992e-7dd1-42d9-b362-82febc75c072\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742946 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8026992e-7dd1-42d9-b362-82febc75c072-logs\") pod \"8026992e-7dd1-42d9-b362-82febc75c072\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " 
Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742967 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-public-tls-certs\") pod \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\" (UID: \"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.742986 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96spp\" (UniqueName: \"kubernetes.io/projected/8026992e-7dd1-42d9-b362-82febc75c072-kube-api-access-96spp\") pod \"8026992e-7dd1-42d9-b362-82febc75c072\" (UID: \"8026992e-7dd1-42d9-b362-82febc75c072\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.745547 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-config-data" (OuterVolumeSpecName: "config-data") pod "1654e799-40ef-413a-8324-bb5b4f7a8f17" (UID: "1654e799-40ef-413a-8324-bb5b4f7a8f17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.748439 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-logs" (OuterVolumeSpecName: "logs") pod "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" (UID: "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.749326 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8026992e-7dd1-42d9-b362-82febc75c072-logs" (OuterVolumeSpecName: "logs") pod "8026992e-7dd1-42d9-b362-82febc75c072" (UID: "8026992e-7dd1-42d9-b362-82febc75c072"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.750233 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "1654e799-40ef-413a-8324-bb5b4f7a8f17" (UID: "1654e799-40ef-413a-8324-bb5b4f7a8f17"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.751151 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.753322 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8026992e-7dd1-42d9-b362-82febc75c072-kube-api-access-96spp" (OuterVolumeSpecName: "kube-api-access-96spp") pod "8026992e-7dd1-42d9-b362-82febc75c072" (UID: "8026992e-7dd1-42d9-b362-82febc75c072"). InnerVolumeSpecName "kube-api-access-96spp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.759592 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1654e799-40ef-413a-8324-bb5b4f7a8f17-kube-api-access-64df5" (OuterVolumeSpecName: "kube-api-access-64df5") pod "1654e799-40ef-413a-8324-bb5b4f7a8f17" (UID: "1654e799-40ef-413a-8324-bb5b4f7a8f17"). InnerVolumeSpecName "kube-api-access-64df5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.764720 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-656896d5d5-fczbx" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.157:9696/\": dial tcp 10.217.0.157:9696: connect: connection refused" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.766465 4922 scope.go:117] "RemoveContainer" containerID="f260bc8fa0de7751b280365c6bd0a0f523435c779c5725e4de6df27a0478f19c" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.767157 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-556f68d56-jxmlq"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.767160 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-kube-api-access-lf7d4" (OuterVolumeSpecName: "kube-api-access-lf7d4") pod "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" (UID: "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3"). InnerVolumeSpecName "kube-api-access-lf7d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.773834 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-556f68d56-jxmlq"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.802301 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" (UID: "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.804332 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.833420 4922 scope.go:117] "RemoveContainer" containerID="e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.837643 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.845004 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf7d4\" (UniqueName: \"kubernetes.io/projected/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-kube-api-access-lf7d4\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.847130 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.847362 4922 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-kolla-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.847533 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1654e799-40ef-413a-8324-bb5b4f7a8f17-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.847664 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.847772 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8026992e-7dd1-42d9-b362-82febc75c072-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.847882 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96spp\" (UniqueName: \"kubernetes.io/projected/8026992e-7dd1-42d9-b362-82febc75c072-kube-api-access-96spp\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.847988 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64df5\" (UniqueName: \"kubernetes.io/projected/1654e799-40ef-413a-8324-bb5b4f7a8f17-kube-api-access-64df5\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.849863 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.850643 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" (UID: "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.859769 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.868769 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.874654 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.878841 4922 scope.go:117] "RemoveContainer" containerID="1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.883663 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.889948 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.910550 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell04139-account-delete-b4vm7"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.914713 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "1654e799-40ef-413a-8324-bb5b4f7a8f17" (UID: "1654e799-40ef-413a-8324-bb5b4f7a8f17"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.916605 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8026992e-7dd1-42d9-b362-82febc75c072" (UID: "8026992e-7dd1-42d9-b362-82febc75c072"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.921533 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell04139-account-delete-b4vm7"] Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.924359 4922 scope.go:117] "RemoveContainer" containerID="e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.925229 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" (UID: "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.926460 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1\": container with ID starting with e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1 not found: ID does not exist" containerID="e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.926501 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1"} err="failed to get container status \"e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1\": rpc error: code = NotFound desc = could not find container \"e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1\": container with ID starting with e2ffde03ef174da05f49312b789f0e3ad9899045a980335709ce5ce333b82da1 not found: ID does not exist" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.926521 4922 scope.go:117] "RemoveContainer" containerID="1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe" Sep 29 22:50:20 crc kubenswrapper[4922]: E0929 22:50:20.928705 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe\": container with ID starting with 1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe not found: ID does not exist" containerID="1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.928731 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe"} err="failed to get container status \"1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe\": rpc error: code = NotFound desc = could not find container \"1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe\": container with ID starting with 1155c64b2b1fab891904ec67f966c62bd70409fcef92b59a4945e7fbad1c09fe not found: ID does not exist" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.928746 4922 scope.go:117] "RemoveContainer" containerID="db4826cac698823aad07ded4c68c796267271768ac310ffcee02df8874d50b96" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.932381 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.943690 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1654e799-40ef-413a-8324-bb5b4f7a8f17" (UID: "1654e799-40ef-413a-8324-bb5b4f7a8f17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.950931 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5ghp\" (UniqueName: \"kubernetes.io/projected/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kube-api-access-r5ghp\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.951003 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-galera-tls-certs\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.951658 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.951682 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.951699 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.951712 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.951724 4922 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1654e799-40ef-413a-8324-bb5b4f7a8f17-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.955282 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-config-data" (OuterVolumeSpecName: "config-data") pod "8026992e-7dd1-42d9-b362-82febc75c072" (UID: "8026992e-7dd1-42d9-b362-82febc75c072"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.974930 4922 scope.go:117] "RemoveContainer" containerID="70b0fa4952e40bc0e0d7fd5d77a22557f9abd49c9cf9a6a2477a7399f2433c1f" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.982874 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kube-api-access-r5ghp" (OuterVolumeSpecName: "kube-api-access-r5ghp") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "kube-api-access-r5ghp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.985830 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "8026992e-7dd1-42d9-b362-82febc75c072" (UID: "8026992e-7dd1-42d9-b362-82febc75c072"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.986292 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data" (OuterVolumeSpecName: "config-data") pod "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" (UID: "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.991936 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" (UID: "4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:20 crc kubenswrapper[4922]: I0929 22:50:20.998696 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.005016 4922 scope.go:117] "RemoveContainer" containerID="9d81b17eb6b803729c871b1ba518d9db5c6074ebf6d349cf4471bdfb5a4bac22" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.024994 4922 scope.go:117] "RemoveContainer" containerID="4bade6528d890c812a2f6c25a51fb063416552fbc042c3c5effa4226f2415177" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052063 4922 scope.go:117] "RemoveContainer" containerID="ccbfc95659bd8ac0f5153a3e15fe5796f00cb82dbede7bd19b2c1d755699bbe7" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052377 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-generated\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052465 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052505 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-combined-ca-bundle\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052535 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-secrets\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052571 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kolla-config\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052640 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-operator-scripts\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052673 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-default\") pod \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\" (UID: \"866ac5e5-219a-4afa-b6b3-0ca293c81f1d\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052738 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052954 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052970 4922 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052980 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5ghp\" (UniqueName: \"kubernetes.io/projected/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kube-api-access-r5ghp\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.052989 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.053009 4922 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.053016 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-generated\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.053025 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8026992e-7dd1-42d9-b362-82febc75c072-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.053438 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.053806 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.054254 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.070588 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.071052 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-secrets" (OuterVolumeSpecName: "secrets") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.086360 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.087715 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.091676 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.091708 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="3ead89b5-3aff-47b9-9516-0eaa33dca7aa" containerName="nova-scheduler-scheduler" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.104909 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "866ac5e5-219a-4afa-b6b3-0ca293c81f1d" (UID: "866ac5e5-219a-4afa-b6b3-0ca293c81f1d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.154262 4922 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-operator-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.154294 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-config-data-default\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.154320 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.154329 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.154340 4922 reconciler_common.go:293] "Volume detached for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.154348 4922 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/866ac5e5-219a-4afa-b6b3-0ca293c81f1d-kolla-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.177159 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.255341 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.626929 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.638763 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660545 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-internal-tls-certs\") pod \"391e4250-b978-4ce4-811d-ae2a81a8500f\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660616 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-config-data\") pod \"391e4250-b978-4ce4-811d-ae2a81a8500f\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660638 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-combined-ca-bundle\") pod \"391e4250-b978-4ce4-811d-ae2a81a8500f\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660659 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cab5f5be-6bdd-481b-a07b-08491f6f2be5-erlang-cookie-secret\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660719 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660806 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-confd\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660855 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d569\" (UniqueName: \"kubernetes.io/projected/391e4250-b978-4ce4-811d-ae2a81a8500f-kube-api-access-8d569\") pod \"391e4250-b978-4ce4-811d-ae2a81a8500f\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660882 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660908 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-227pz\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-kube-api-access-227pz\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.660965 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-plugins-conf\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" 
(UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.661007 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-public-tls-certs\") pod \"391e4250-b978-4ce4-811d-ae2a81a8500f\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.661039 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-server-conf\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.661062 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cab5f5be-6bdd-481b-a07b-08491f6f2be5-pod-info\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.661109 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-erlang-cookie\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.661134 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-plugins\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.661160 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-tls\") pod \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\" (UID: \"cab5f5be-6bdd-481b-a07b-08491f6f2be5\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.661184 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391e4250-b978-4ce4-811d-ae2a81a8500f-logs\") pod \"391e4250-b978-4ce4-811d-ae2a81a8500f\" (UID: \"391e4250-b978-4ce4-811d-ae2a81a8500f\") " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.662181 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391e4250-b978-4ce4-811d-ae2a81a8500f-logs" (OuterVolumeSpecName: "logs") pod "391e4250-b978-4ce4-811d-ae2a81a8500f" (UID: "391e4250-b978-4ce4-811d-ae2a81a8500f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.662779 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.663772 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.663883 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.680455 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.680635 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-kube-api-access-227pz" (OuterVolumeSpecName: "kube-api-access-227pz") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "kube-api-access-227pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.681537 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode56d31de_64f5_42a7_8243_7ac6d992a03d.slice/crio-conmon-b6074f8dda50ed5b4ce98889541af126fde4d515d920458ec2ced51aad77f19d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode56d31de_64f5_42a7_8243_7ac6d992a03d.slice/crio-b6074f8dda50ed5b4ce98889541af126fde4d515d920458ec2ced51aad77f19d.scope\": RecentStats: unable to find data in memory cache]" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.681834 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cab5f5be-6bdd-481b-a07b-08491f6f2be5-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.687302 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/391e4250-b978-4ce4-811d-ae2a81a8500f-kube-api-access-8d569" (OuterVolumeSpecName: "kube-api-access-8d569") pod "391e4250-b978-4ce4-811d-ae2a81a8500f" (UID: "391e4250-b978-4ce4-811d-ae2a81a8500f"). InnerVolumeSpecName "kube-api-access-8d569". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.688691 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/cab5f5be-6bdd-481b-a07b-08491f6f2be5-pod-info" (OuterVolumeSpecName: "pod-info") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.691712 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.695913 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_014a5aba-d41a-4647-8459-c770534a4a60/ovn-northd/0.log" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.696255 4922 generic.go:334] "Generic (PLEG): container finished" podID="014a5aba-d41a-4647-8459-c770534a4a60" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" exitCode=139 Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.696337 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"014a5aba-d41a-4647-8459-c770534a4a60","Type":"ContainerDied","Data":"372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.701815 4922 generic.go:334] "Generic (PLEG): container finished" podID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerID="19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127" exitCode=0 Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.701872 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.701881 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"391e4250-b978-4ce4-811d-ae2a81a8500f","Type":"ContainerDied","Data":"19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.701913 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"391e4250-b978-4ce4-811d-ae2a81a8500f","Type":"ContainerDied","Data":"c8eb20aadddd51f175050e57a2950a981ee6f98a098a6e3bb7dbd7640ad8550d"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.701931 4922 scope.go:117] "RemoveContainer" containerID="19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.705429 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-75fb76f858-mmqwn" event={"ID":"4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3","Type":"ContainerDied","Data":"b58903704e3604a9d7a8b3492852fd57de1554a4810248296bcf01ec3baf7782"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.705511 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-75fb76f858-mmqwn" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.713972 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1654e799-40ef-413a-8324-bb5b4f7a8f17","Type":"ContainerDied","Data":"5a36860f0e2452601f8f54e267156ad50e136c84c32898f42e31fa0eecde70e4"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.714074 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.717019 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "391e4250-b978-4ce4-811d-ae2a81a8500f" (UID: "391e4250-b978-4ce4-811d-ae2a81a8500f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.719805 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data" (OuterVolumeSpecName: "config-data") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.721680 4922 generic.go:334] "Generic (PLEG): container finished" podID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerID="36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd" exitCode=0 Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.722967 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.723382 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cab5f5be-6bdd-481b-a07b-08491f6f2be5","Type":"ContainerDied","Data":"36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.723441 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"cab5f5be-6bdd-481b-a07b-08491f6f2be5","Type":"ContainerDied","Data":"b1d678a6d50424266b45f4e897abe44d2265e6bab757ecfa67241b7bd7b65447"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.730016 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-config-data" (OuterVolumeSpecName: "config-data") pod "391e4250-b978-4ce4-811d-ae2a81a8500f" (UID: "391e4250-b978-4ce4-811d-ae2a81a8500f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.748048 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "391e4250-b978-4ce4-811d-ae2a81a8500f" (UID: "391e4250-b978-4ce4-811d-ae2a81a8500f"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.751365 4922 generic.go:334] "Generic (PLEG): container finished" podID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerID="b6074f8dda50ed5b4ce98889541af126fde4d515d920458ec2ced51aad77f19d" exitCode=0 Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.751424 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e56d31de-64f5-42a7-8243-7ac6d992a03d","Type":"ContainerDied","Data":"b6074f8dda50ed5b4ce98889541af126fde4d515d920458ec2ced51aad77f19d"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.752734 4922 generic.go:334] "Generic (PLEG): container finished" podID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerID="c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa" exitCode=0 Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.752769 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"866ac5e5-219a-4afa-b6b3-0ca293c81f1d","Type":"ContainerDied","Data":"c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.752783 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"866ac5e5-219a-4afa-b6b3-0ca293c81f1d","Type":"ContainerDied","Data":"80221817c0247a5eb947fcfbca2c7a261517d2e63942374268eab7554e0722e4"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.752835 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.754311 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-server-conf" (OuterVolumeSpecName: "server-conf") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.758662 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8026992e-7dd1-42d9-b362-82febc75c072","Type":"ContainerDied","Data":"413b6a61bdcb727ee4cc28c78ea98a8e4a44d8830cab40b697aeb42829caa211"} Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.758684 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.759133 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "391e4250-b978-4ce4-811d-ae2a81a8500f" (UID: "391e4250-b978-4ce4-811d-ae2a81a8500f"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762479 4922 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762506 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762515 4922 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/cab5f5be-6bdd-481b-a07b-08491f6f2be5-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762524 4922 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762535 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762546 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762554 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762562 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/391e4250-b978-4ce4-811d-ae2a81a8500f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762571 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762581 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762592 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/391e4250-b978-4ce4-811d-ae2a81a8500f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762600 4922 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/cab5f5be-6bdd-481b-a07b-08491f6f2be5-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762610 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cab5f5be-6bdd-481b-a07b-08491f6f2be5-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc 
kubenswrapper[4922]: I0929 22:50:21.762619 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d569\" (UniqueName: \"kubernetes.io/projected/391e4250-b978-4ce4-811d-ae2a81a8500f-kube-api-access-8d569\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762644 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.762653 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-227pz\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-kube-api-access-227pz\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.782642 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.788089 4922 scope.go:117] "RemoveContainer" containerID="6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.793087 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "cab5f5be-6bdd-481b-a07b-08491f6f2be5" (UID: "cab5f5be-6bdd-481b-a07b-08491f6f2be5"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.814599 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.822709 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.859655 4922 scope.go:117] "RemoveContainer" containerID="19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127" Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.861615 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127\": container with ID starting with 19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127 not found: ID does not exist" containerID="19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.861670 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127"} err="failed to get container status \"19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127\": rpc error: code = NotFound desc = could not find container \"19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127\": container with ID starting with 19e1762fbb1294d190fa4d96579acc5139f635cb95b2dce3011f05cde085d127 not found: ID does not exist" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.861707 4922 scope.go:117] "RemoveContainer" containerID="6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7" Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.862173 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7\": container with ID starting with 6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7 not found: ID does not exist" containerID="6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.862206 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7"} err="failed to get container status \"6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7\": rpc error: code = NotFound desc = could not find container \"6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7\": container with ID starting with 6591d94ab03072db5f38edc0120badc205aa740cf33a58e767dd3e0707e945f7 not found: ID does not exist" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.862230 4922 scope.go:117] "RemoveContainer" containerID="6900ea52bd3a44fd3677b19a3a356664f86ce5fb715b38eadb63dcaaa0a2a2c5" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.864362 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/cab5f5be-6bdd-481b-a07b-08491f6f2be5-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.864380 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.864598 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-75fb76f858-mmqwn"] Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.873749 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.898759 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-75fb76f858-mmqwn"] Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.913610 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.918763 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.928461 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.928584 4922 scope.go:117] "RemoveContainer" containerID="172f258b5cd1d2a007268349f6866d6235b2344ef34219c5f1c44b3260c97af7" Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.947622 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.966243 4922 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Sep 29 22:50:21 crc kubenswrapper[4922]: E0929 22:50:21.966333 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data podName:e56d31de-64f5-42a7-8243-7ac6d992a03d nodeName:}" failed. No retries permitted until 2025-09-29 22:50:29.966318588 +0000 UTC m=+1434.276607401 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data") pod "rabbitmq-server-0" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d") : configmap "rabbitmq-config-data" not found Sep 29 22:50:21 crc kubenswrapper[4922]: I0929 22:50:21.976239 4922 scope.go:117] "RemoveContainer" containerID="e253e117e33e1551c4b4d444e0f50023636461d456d2027cdac35eb3aeb6c536" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.015914 4922 scope.go:117] "RemoveContainer" containerID="36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.062071 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.066830 4922 scope.go:117] "RemoveContainer" containerID="7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.066817 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkcsp\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-kube-api-access-pkcsp\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067009 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067057 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e56d31de-64f5-42a7-8243-7ac6d992a03d-pod-info\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067086 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067149 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-erlang-cookie\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067181 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-server-conf\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067277 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-confd\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067370 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e56d31de-64f5-42a7-8243-7ac6d992a03d-erlang-cookie-secret\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067443 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-tls\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067469 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-plugins\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.067488 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-plugins-conf\") pod \"e56d31de-64f5-42a7-8243-7ac6d992a03d\" (UID: \"e56d31de-64f5-42a7-8243-7ac6d992a03d\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.070356 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.071203 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.077125 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.080913 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e56d31de-64f5-42a7-8243-7ac6d992a03d-pod-info" (OuterVolumeSpecName: "pod-info") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.082266 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "local-storage05-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.089947 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.089962 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e56d31de-64f5-42a7-8243-7ac6d992a03d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.090070 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data" (OuterVolumeSpecName: "config-data") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.090175 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-kube-api-access-pkcsp" (OuterVolumeSpecName: "kube-api-access-pkcsp") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "kube-api-access-pkcsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.090573 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.101072 4922 scope.go:117] "RemoveContainer" containerID="36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.101227 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 22:50:22 crc kubenswrapper[4922]: E0929 22:50:22.101668 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd\": container with ID starting with 36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd not found: ID does not exist" containerID="36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.101702 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd"} err="failed to get container status \"36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd\": rpc error: code = NotFound desc = could not find container \"36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd\": container with ID starting with 36c62e4e9031170245889487af94b9e65bbd2728be82b5b532ffcd6748ae18cd not found: ID does not exist" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.101727 4922 scope.go:117] "RemoveContainer" containerID="7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f" Sep 29 22:50:22 crc kubenswrapper[4922]: E0929 22:50:22.102368 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f\": container with ID starting with 7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f not found: ID does not exist" containerID="7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.102400 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f"} err="failed to get container status \"7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f\": rpc error: code = NotFound desc = could not find container \"7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f\": container with ID starting with 7e7b8f163869413c2ade1f0810897bdfea213b48f779c414166b9bbeec655c7f not found: ID does not exist" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.102414 4922 scope.go:117] "RemoveContainer" containerID="c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.107382 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.118681 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-server-conf" (OuterVolumeSpecName: "server-conf") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.120601 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_014a5aba-d41a-4647-8459-c770534a4a60/ovn-northd/0.log" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.120728 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.133611 4922 scope.go:117] "RemoveContainer" containerID="b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.165479 4922 scope.go:117] "RemoveContainer" containerID="c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa" Sep 29 22:50:22 crc kubenswrapper[4922]: E0929 22:50:22.165914 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa\": container with ID starting with c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa not found: ID does not exist" containerID="c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.165942 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa"} err="failed to get container status \"c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa\": rpc error: code = NotFound desc = could not find container \"c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa\": container with ID starting with c9de5f6cc1e5f40aa0c0ec78139049155a676acef43cb04dede3afde59dd70fa not found: ID does not exist" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.165963 4922 scope.go:117] "RemoveContainer" containerID="b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5" Sep 29 22:50:22 crc kubenswrapper[4922]: E0929 22:50:22.166141 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5\": container with ID starting with b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5 not found: ID does not exist" containerID="b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.166162 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5"} err="failed to get container status \"b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5\": rpc error: code = NotFound desc = could not find container \"b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5\": container with ID starting with b0ccca95f719a09d4c28fc907ec11105dad2d351eac49b3d25e01d0d1fd46fc5 not found: ID does not exist" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.166175 4922 scope.go:117] "RemoveContainer" containerID="3480a1ee34671883eedf597274a6f64fbdd34cfd4e44a319ae58d788dace7d36" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.168963 4922 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e56d31de-64f5-42a7-8243-7ac6d992a03d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 
22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169047 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169102 4922 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169153 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169205 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkcsp\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-kube-api-access-pkcsp\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169311 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169402 4922 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e56d31de-64f5-42a7-8243-7ac6d992a03d-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169480 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169553 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169635 4922 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e56d31de-64f5-42a7-8243-7ac6d992a03d-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.169433 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e56d31de-64f5-42a7-8243-7ac6d992a03d" (UID: "e56d31de-64f5-42a7-8243-7ac6d992a03d"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.183868 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.189871 4922 scope.go:117] "RemoveContainer" containerID="e3c85f65a2283ba365db1c99edfb8fc2be5ee33fe88ee44750524b8d792b1cbf" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270157 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-config\") pod \"014a5aba-d41a-4647-8459-c770534a4a60\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270222 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/014a5aba-d41a-4647-8459-c770534a4a60-ovn-rundir\") pod \"014a5aba-d41a-4647-8459-c770534a4a60\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270295 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-combined-ca-bundle\") pod \"014a5aba-d41a-4647-8459-c770534a4a60\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270346 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jwtm\" (UniqueName: \"kubernetes.io/projected/014a5aba-d41a-4647-8459-c770534a4a60-kube-api-access-8jwtm\") pod \"014a5aba-d41a-4647-8459-c770534a4a60\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270369 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-metrics-certs-tls-certs\") pod \"014a5aba-d41a-4647-8459-c770534a4a60\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270436 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-scripts\") pod \"014a5aba-d41a-4647-8459-c770534a4a60\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270525 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-ovn-northd-tls-certs\") pod \"014a5aba-d41a-4647-8459-c770534a4a60\" (UID: \"014a5aba-d41a-4647-8459-c770534a4a60\") " Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270792 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e56d31de-64f5-42a7-8243-7ac6d992a03d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.270809 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.271528 4922 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-config" (OuterVolumeSpecName: "config") pod "014a5aba-d41a-4647-8459-c770534a4a60" (UID: "014a5aba-d41a-4647-8459-c770534a4a60"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.271732 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/014a5aba-d41a-4647-8459-c770534a4a60-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "014a5aba-d41a-4647-8459-c770534a4a60" (UID: "014a5aba-d41a-4647-8459-c770534a4a60"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.272054 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-scripts" (OuterVolumeSpecName: "scripts") pod "014a5aba-d41a-4647-8459-c770534a4a60" (UID: "014a5aba-d41a-4647-8459-c770534a4a60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.275473 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/014a5aba-d41a-4647-8459-c770534a4a60-kube-api-access-8jwtm" (OuterVolumeSpecName: "kube-api-access-8jwtm") pod "014a5aba-d41a-4647-8459-c770534a4a60" (UID: "014a5aba-d41a-4647-8459-c770534a4a60"). InnerVolumeSpecName "kube-api-access-8jwtm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.292540 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "014a5aba-d41a-4647-8459-c770534a4a60" (UID: "014a5aba-d41a-4647-8459-c770534a4a60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.338817 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "014a5aba-d41a-4647-8459-c770534a4a60" (UID: "014a5aba-d41a-4647-8459-c770534a4a60"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.372093 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.372143 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jwtm\" (UniqueName: \"kubernetes.io/projected/014a5aba-d41a-4647-8459-c770534a4a60-kube-api-access-8jwtm\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.372164 4922 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.372182 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.372199 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/014a5aba-d41a-4647-8459-c770534a4a60-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.372215 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/014a5aba-d41a-4647-8459-c770534a4a60-ovn-rundir\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.411944 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "014a5aba-d41a-4647-8459-c770534a4a60" (UID: "014a5aba-d41a-4647-8459-c770534a4a60"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.469486 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1654e799-40ef-413a-8324-bb5b4f7a8f17" path="/var/lib/kubelet/pods/1654e799-40ef-413a-8324-bb5b4f7a8f17/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.470896 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c5eef11-d4e0-43cd-b305-c427f85d173a" path="/var/lib/kubelet/pods/1c5eef11-d4e0-43cd-b305-c427f85d173a/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.472193 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" path="/var/lib/kubelet/pods/391e4250-b978-4ce4-811d-ae2a81a8500f/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.474599 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" path="/var/lib/kubelet/pods/447099dc-1eea-4510-8b94-faa6899f6b06/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.476206 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/014a5aba-d41a-4647-8459-c770534a4a60-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.476744 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" path="/var/lib/kubelet/pods/4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.478484 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" path="/var/lib/kubelet/pods/51f81c86-8f6d-4506-a940-5015032df5bd/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.482307 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" path="/var/lib/kubelet/pods/5b8254ca-83c1-49a8-b453-107577b54f01/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.483374 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8026992e-7dd1-42d9-b362-82febc75c072" path="/var/lib/kubelet/pods/8026992e-7dd1-42d9-b362-82febc75c072/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.484968 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" path="/var/lib/kubelet/pods/866ac5e5-219a-4afa-b6b3-0ca293c81f1d/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.486509 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" path="/var/lib/kubelet/pods/8788e59c-0cd3-43c5-8591-d452f9cb083a/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.487571 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" path="/var/lib/kubelet/pods/b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.489383 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" path="/var/lib/kubelet/pods/cab5f5be-6bdd-481b-a07b-08491f6f2be5/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.490377 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd700631-7b12-4e93-9e40-747b09623e7e" 
path="/var/lib/kubelet/pods/cd700631-7b12-4e93-9e40-747b09623e7e/volumes" Sep 29 22:50:22 crc kubenswrapper[4922]: E0929 22:50:22.752817 4922 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Sep 29 22:50:22 crc kubenswrapper[4922]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-09-29T22:50:16Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Sep 29 22:50:22 crc kubenswrapper[4922]: /etc/init.d/functions: line 589: 403 Alarm clock "$@" Sep 29 22:50:22 crc kubenswrapper[4922]: > execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-nrr6k" message=< Sep 29 22:50:22 crc kubenswrapper[4922]: Exiting ovn-controller (1) [FAILED] Sep 29 22:50:22 crc kubenswrapper[4922]: Killing ovn-controller (1) [ OK ] Sep 29 22:50:22 crc kubenswrapper[4922]: 2025-09-29T22:50:16Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Sep 29 22:50:22 crc kubenswrapper[4922]: /etc/init.d/functions: line 589: 403 Alarm clock "$@" Sep 29 22:50:22 crc kubenswrapper[4922]: > Sep 29 22:50:22 crc kubenswrapper[4922]: E0929 22:50:22.753242 4922 kuberuntime_container.go:691] "PreStop hook failed" err=< Sep 29 22:50:22 crc kubenswrapper[4922]: command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: 2025-09-29T22:50:16Z|00001|fatal_signal|WARN|terminating with signal 14 (Alarm clock) Sep 29 22:50:22 crc kubenswrapper[4922]: /etc/init.d/functions: line 589: 403 Alarm clock "$@" Sep 29 22:50:22 crc kubenswrapper[4922]: > pod="openstack/ovn-controller-nrr6k" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" containerID="cri-o://d32993d34e081cdec7334222d057489baf75c3258521cbb12f048b68fcbd008d" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.753286 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-nrr6k" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" containerID="cri-o://d32993d34e081cdec7334222d057489baf75c3258521cbb12f048b68fcbd008d" gracePeriod=23 Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.779013 4922 generic.go:334] "Generic (PLEG): container finished" podID="bf1c4a85-458f-4412-ae77-af6d87370b62" containerID="f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198" exitCode=0 Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.779094 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bf1c4a85-458f-4412-ae77-af6d87370b62","Type":"ContainerDied","Data":"f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198"} Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.787709 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e56d31de-64f5-42a7-8243-7ac6d992a03d","Type":"ContainerDied","Data":"77c1cf89ced628c70e19a175a9798b05aadd885cda871ffdb601dbf1fcaff309"} Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.787747 4922 scope.go:117] "RemoveContainer" containerID="b6074f8dda50ed5b4ce98889541af126fde4d515d920458ec2ced51aad77f19d" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.787830 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.806980 4922 generic.go:334] "Generic (PLEG): container finished" podID="19fc05a2-d210-4c05-8341-eafdbcc40dc1" containerID="f71b4f675a06896c34fd4e0be64b4fe734c8451e3903876095b9c46d095cd09a" exitCode=0 Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.807042 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-66cbdc5bdb-sfmk4" event={"ID":"19fc05a2-d210-4c05-8341-eafdbcc40dc1","Type":"ContainerDied","Data":"f71b4f675a06896c34fd4e0be64b4fe734c8451e3903876095b9c46d095cd09a"} Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.809888 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-nrr6k_217b822b-44c6-465e-982a-23fa07d94b58/ovn-controller/0.log" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.809914 4922 generic.go:334] "Generic (PLEG): container finished" podID="217b822b-44c6-465e-982a-23fa07d94b58" containerID="d32993d34e081cdec7334222d057489baf75c3258521cbb12f048b68fcbd008d" exitCode=139 Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.809948 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nrr6k" event={"ID":"217b822b-44c6-465e-982a-23fa07d94b58","Type":"ContainerDied","Data":"d32993d34e081cdec7334222d057489baf75c3258521cbb12f048b68fcbd008d"} Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.830721 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.841202 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.859946 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.861445 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_014a5aba-d41a-4647-8459-c770534a4a60/ovn-northd/0.log" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.862553 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"014a5aba-d41a-4647-8459-c770534a4a60","Type":"ContainerDied","Data":"87119ad2e8555685bf1f7f2fab85f97375a3a9545140b405f20634d11571f857"} Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.862646 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.904245 4922 scope.go:117] "RemoveContainer" containerID="34658a45d429ee4156c92b9c0c2d869fe7dd616fe8bb3f832f80da1bc9e277b3" Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.913907 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 22:50:22 crc kubenswrapper[4922]: I0929 22:50:22.923635 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999080 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-internal-tls-certs\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999155 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-fernet-keys\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999180 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-public-tls-certs\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999200 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pth5\" (UniqueName: \"kubernetes.io/projected/19fc05a2-d210-4c05-8341-eafdbcc40dc1-kube-api-access-6pth5\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999249 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-credential-keys\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999263 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-combined-ca-bundle\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999328 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-scripts\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:22.999453 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-config-data\") pod \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\" (UID: \"19fc05a2-d210-4c05-8341-eafdbcc40dc1\") " Sep 29 22:50:23 crc kubenswrapper[4922]: E0929 22:50:23.015074 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: 
checking if PID of 03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18 is running failed: container process not found" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.016464 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.016487 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-scripts" (OuterVolumeSpecName: "scripts") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.016557 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: E0929 22:50:23.016746 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18 is running failed: container process not found" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:23 crc kubenswrapper[4922]: E0929 22:50:23.016980 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18 is running failed: container process not found" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 29 22:50:23 crc kubenswrapper[4922]: E0929 22:50:23.017010 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" containerName="nova-cell0-conductor-conductor" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.042985 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19fc05a2-d210-4c05-8341-eafdbcc40dc1-kube-api-access-6pth5" (OuterVolumeSpecName: "kube-api-access-6pth5") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "kube-api-access-6pth5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.048487 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-config-data" (OuterVolumeSpecName: "config-data") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.060492 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.071877 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.074680 4922 scope.go:117] "RemoveContainer" containerID="18271197116a64d48cb8446e8bb69a2a9e1aea53d596b37826cbb2a61e257443" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.078485 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.100273 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "19fc05a2-d210-4c05-8341-eafdbcc40dc1" (UID: "19fc05a2-d210-4c05-8341-eafdbcc40dc1"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101292 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-config-data\") pod \"bf1c4a85-458f-4412-ae77-af6d87370b62\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101672 4922 scope.go:117] "RemoveContainer" containerID="372544f951e2641f2c4fa003a27711d0aa9043dde07c92d8d597a700218326d5" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101695 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101709 4922 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101720 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101729 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pth5\" (UniqueName: \"kubernetes.io/projected/19fc05a2-d210-4c05-8341-eafdbcc40dc1-kube-api-access-6pth5\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101739 4922 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101748 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101757 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.101765 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19fc05a2-d210-4c05-8341-eafdbcc40dc1-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.122027 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-config-data" (OuterVolumeSpecName: "config-data") pod "bf1c4a85-458f-4412-ae77-af6d87370b62" (UID: "bf1c4a85-458f-4412-ae77-af6d87370b62"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.154424 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-nrr6k_217b822b-44c6-465e-982a-23fa07d94b58/ovn-controller/0.log" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.154481 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-nrr6k" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202509 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run-ovn\") pod \"217b822b-44c6-465e-982a-23fa07d94b58\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202582 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-combined-ca-bundle\") pod \"bf1c4a85-458f-4412-ae77-af6d87370b62\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202716 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-log-ovn\") pod \"217b822b-44c6-465e-982a-23fa07d94b58\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202785 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfqln\" (UniqueName: \"kubernetes.io/projected/bf1c4a85-458f-4412-ae77-af6d87370b62-kube-api-access-nfqln\") pod \"bf1c4a85-458f-4412-ae77-af6d87370b62\" (UID: \"bf1c4a85-458f-4412-ae77-af6d87370b62\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202814 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-ovn-controller-tls-certs\") pod \"217b822b-44c6-465e-982a-23fa07d94b58\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202867 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run\") pod \"217b822b-44c6-465e-982a-23fa07d94b58\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202904 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-combined-ca-bundle\") pod \"217b822b-44c6-465e-982a-23fa07d94b58\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202958 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czb6m\" (UniqueName: \"kubernetes.io/projected/217b822b-44c6-465e-982a-23fa07d94b58-kube-api-access-czb6m\") pod \"217b822b-44c6-465e-982a-23fa07d94b58\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.202979 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/217b822b-44c6-465e-982a-23fa07d94b58-scripts\") pod \"217b822b-44c6-465e-982a-23fa07d94b58\" (UID: \"217b822b-44c6-465e-982a-23fa07d94b58\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.203268 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run" (OuterVolumeSpecName: "var-run") pod "217b822b-44c6-465e-982a-23fa07d94b58" 
(UID: "217b822b-44c6-465e-982a-23fa07d94b58"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.203353 4922 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.203368 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.203357 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "217b822b-44c6-465e-982a-23fa07d94b58" (UID: "217b822b-44c6-465e-982a-23fa07d94b58"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.203429 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "217b822b-44c6-465e-982a-23fa07d94b58" (UID: "217b822b-44c6-465e-982a-23fa07d94b58"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.204473 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/217b822b-44c6-465e-982a-23fa07d94b58-scripts" (OuterVolumeSpecName: "scripts") pod "217b822b-44c6-465e-982a-23fa07d94b58" (UID: "217b822b-44c6-465e-982a-23fa07d94b58"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.208030 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf1c4a85-458f-4412-ae77-af6d87370b62-kube-api-access-nfqln" (OuterVolumeSpecName: "kube-api-access-nfqln") pod "bf1c4a85-458f-4412-ae77-af6d87370b62" (UID: "bf1c4a85-458f-4412-ae77-af6d87370b62"). InnerVolumeSpecName "kube-api-access-nfqln". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.211559 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/217b822b-44c6-465e-982a-23fa07d94b58-kube-api-access-czb6m" (OuterVolumeSpecName: "kube-api-access-czb6m") pod "217b822b-44c6-465e-982a-23fa07d94b58" (UID: "217b822b-44c6-465e-982a-23fa07d94b58"). InnerVolumeSpecName "kube-api-access-czb6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.224551 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "217b822b-44c6-465e-982a-23fa07d94b58" (UID: "217b822b-44c6-465e-982a-23fa07d94b58"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.240497 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf1c4a85-458f-4412-ae77-af6d87370b62" (UID: "bf1c4a85-458f-4412-ae77-af6d87370b62"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.268559 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "217b822b-44c6-465e-982a-23fa07d94b58" (UID: "217b822b-44c6-465e-982a-23fa07d94b58"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312169 4922 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312199 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfqln\" (UniqueName: \"kubernetes.io/projected/bf1c4a85-458f-4412-ae77-af6d87370b62-kube-api-access-nfqln\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312210 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312218 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217b822b-44c6-465e-982a-23fa07d94b58-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312227 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czb6m\" (UniqueName: \"kubernetes.io/projected/217b822b-44c6-465e-982a-23fa07d94b58-kube-api-access-czb6m\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312235 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/217b822b-44c6-465e-982a-23fa07d94b58-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312242 4922 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/217b822b-44c6-465e-982a-23fa07d94b58-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.312251 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf1c4a85-458f-4412-ae77-af6d87370b62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.455974 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.464117 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.472322 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.492958 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514057 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4799fcf9-24e7-4c61-9e5e-109105ec7003-logs\") pod \"4799fcf9-24e7-4c61-9e5e-109105ec7003\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514090 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data\") pod \"98ffad34-9721-4849-84ba-f14c518250ac\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514116 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-combined-ca-bundle\") pod \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514138 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98ffad34-9721-4849-84ba-f14c518250ac-logs\") pod \"98ffad34-9721-4849-84ba-f14c518250ac\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514157 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6rgk\" (UniqueName: \"kubernetes.io/projected/4799fcf9-24e7-4c61-9e5e-109105ec7003-kube-api-access-x6rgk\") pod \"4799fcf9-24e7-4c61-9e5e-109105ec7003\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514186 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jb72\" (UniqueName: \"kubernetes.io/projected/98ffad34-9721-4849-84ba-f14c518250ac-kube-api-access-9jb72\") pod \"98ffad34-9721-4849-84ba-f14c518250ac\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514203 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data-custom\") pod \"98ffad34-9721-4849-84ba-f14c518250ac\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514223 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data\") pod \"4799fcf9-24e7-4c61-9e5e-109105ec7003\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514246 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-combined-ca-bundle\") pod 
\"4799fcf9-24e7-4c61-9e5e-109105ec7003\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514675 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98ffad34-9721-4849-84ba-f14c518250ac-logs" (OuterVolumeSpecName: "logs") pod "98ffad34-9721-4849-84ba-f14c518250ac" (UID: "98ffad34-9721-4849-84ba-f14c518250ac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514770 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4799fcf9-24e7-4c61-9e5e-109105ec7003-logs" (OuterVolumeSpecName: "logs") pod "4799fcf9-24e7-4c61-9e5e-109105ec7003" (UID: "4799fcf9-24e7-4c61-9e5e-109105ec7003"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.514958 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-combined-ca-bundle\") pod \"98ffad34-9721-4849-84ba-f14c518250ac\" (UID: \"98ffad34-9721-4849-84ba-f14c518250ac\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515003 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kffb4\" (UniqueName: \"kubernetes.io/projected/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-kube-api-access-kffb4\") pod \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515022 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck5gz\" (UniqueName: \"kubernetes.io/projected/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-kube-api-access-ck5gz\") pod \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515063 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-combined-ca-bundle\") pod \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515081 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-config-data\") pod \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\" (UID: \"0a5d1af0-eb7d-46ad-b4f1-eceb10445896\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515107 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data-custom\") pod \"4799fcf9-24e7-4c61-9e5e-109105ec7003\" (UID: \"4799fcf9-24e7-4c61-9e5e-109105ec7003\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515143 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-config-data\") pod \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\" (UID: \"3ead89b5-3aff-47b9-9516-0eaa33dca7aa\") " Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515676 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/4799fcf9-24e7-4c61-9e5e-109105ec7003-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.515688 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98ffad34-9721-4849-84ba-f14c518250ac-logs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.521239 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4799fcf9-24e7-4c61-9e5e-109105ec7003-kube-api-access-x6rgk" (OuterVolumeSpecName: "kube-api-access-x6rgk") pod "4799fcf9-24e7-4c61-9e5e-109105ec7003" (UID: "4799fcf9-24e7-4c61-9e5e-109105ec7003"). InnerVolumeSpecName "kube-api-access-x6rgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.532689 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-kube-api-access-kffb4" (OuterVolumeSpecName: "kube-api-access-kffb4") pod "0a5d1af0-eb7d-46ad-b4f1-eceb10445896" (UID: "0a5d1af0-eb7d-46ad-b4f1-eceb10445896"). InnerVolumeSpecName "kube-api-access-kffb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.533061 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98ffad34-9721-4849-84ba-f14c518250ac-kube-api-access-9jb72" (OuterVolumeSpecName: "kube-api-access-9jb72") pod "98ffad34-9721-4849-84ba-f14c518250ac" (UID: "98ffad34-9721-4849-84ba-f14c518250ac"). InnerVolumeSpecName "kube-api-access-9jb72". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.533101 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "98ffad34-9721-4849-84ba-f14c518250ac" (UID: "98ffad34-9721-4849-84ba-f14c518250ac"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.533131 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-kube-api-access-ck5gz" (OuterVolumeSpecName: "kube-api-access-ck5gz") pod "3ead89b5-3aff-47b9-9516-0eaa33dca7aa" (UID: "3ead89b5-3aff-47b9-9516-0eaa33dca7aa"). InnerVolumeSpecName "kube-api-access-ck5gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.542482 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4799fcf9-24e7-4c61-9e5e-109105ec7003" (UID: "4799fcf9-24e7-4c61-9e5e-109105ec7003"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.547336 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a5d1af0-eb7d-46ad-b4f1-eceb10445896" (UID: "0a5d1af0-eb7d-46ad-b4f1-eceb10445896"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.548190 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ead89b5-3aff-47b9-9516-0eaa33dca7aa" (UID: "3ead89b5-3aff-47b9-9516-0eaa33dca7aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.563830 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4799fcf9-24e7-4c61-9e5e-109105ec7003" (UID: "4799fcf9-24e7-4c61-9e5e-109105ec7003"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.564227 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data" (OuterVolumeSpecName: "config-data") pod "98ffad34-9721-4849-84ba-f14c518250ac" (UID: "98ffad34-9721-4849-84ba-f14c518250ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.566803 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-config-data" (OuterVolumeSpecName: "config-data") pod "0a5d1af0-eb7d-46ad-b4f1-eceb10445896" (UID: "0a5d1af0-eb7d-46ad-b4f1-eceb10445896"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.568057 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98ffad34-9721-4849-84ba-f14c518250ac" (UID: "98ffad34-9721-4849-84ba-f14c518250ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.570984 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-config-data" (OuterVolumeSpecName: "config-data") pod "3ead89b5-3aff-47b9-9516-0eaa33dca7aa" (UID: "3ead89b5-3aff-47b9-9516-0eaa33dca7aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.584356 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data" (OuterVolumeSpecName: "config-data") pod "4799fcf9-24e7-4c61-9e5e-109105ec7003" (UID: "4799fcf9-24e7-4c61-9e5e-109105ec7003"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628264 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628293 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628306 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6rgk\" (UniqueName: \"kubernetes.io/projected/4799fcf9-24e7-4c61-9e5e-109105ec7003-kube-api-access-x6rgk\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628316 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jb72\" (UniqueName: \"kubernetes.io/projected/98ffad34-9721-4849-84ba-f14c518250ac-kube-api-access-9jb72\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628325 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628334 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628342 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628350 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98ffad34-9721-4849-84ba-f14c518250ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628359 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kffb4\" (UniqueName: \"kubernetes.io/projected/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-kube-api-access-kffb4\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628368 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck5gz\" (UniqueName: \"kubernetes.io/projected/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-kube-api-access-ck5gz\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628376 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628396 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a5d1af0-eb7d-46ad-b4f1-eceb10445896-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628406 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/4799fcf9-24e7-4c61-9e5e-109105ec7003-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.628414 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ead89b5-3aff-47b9-9516-0eaa33dca7aa-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.884080 4922 generic.go:334] "Generic (PLEG): container finished" podID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerID="3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13" exitCode=0 Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.884154 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" event={"ID":"4799fcf9-24e7-4c61-9e5e-109105ec7003","Type":"ContainerDied","Data":"3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.884157 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.884184 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z" event={"ID":"4799fcf9-24e7-4c61-9e5e-109105ec7003","Type":"ContainerDied","Data":"c638d48f64d7d197b2acb87180e7f45bbb0455007dd477645ef338648572d8c7"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.884201 4922 scope.go:117] "RemoveContainer" containerID="3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.894917 4922 generic.go:334] "Generic (PLEG): container finished" podID="98ffad34-9721-4849-84ba-f14c518250ac" containerID="71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932" exitCode=0 Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.895002 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8f78686f5-pj8pr" event={"ID":"98ffad34-9721-4849-84ba-f14c518250ac","Type":"ContainerDied","Data":"71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.895032 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-8f78686f5-pj8pr" event={"ID":"98ffad34-9721-4849-84ba-f14c518250ac","Type":"ContainerDied","Data":"85360d25da017ae5ee690d5a3af4906df942e2cdead12892023e658ff64d17a6"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.895054 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-8f78686f5-pj8pr" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.902932 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-66cbdc5bdb-sfmk4" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.902929 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-66cbdc5bdb-sfmk4" event={"ID":"19fc05a2-d210-4c05-8341-eafdbcc40dc1","Type":"ContainerDied","Data":"fa6b9b72afd5c458d18550633466c69ce65cc3f62fdb3620dbc165163cd37949"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.905929 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-nrr6k_217b822b-44c6-465e-982a-23fa07d94b58/ovn-controller/0.log" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.906022 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-nrr6k" event={"ID":"217b822b-44c6-465e-982a-23fa07d94b58","Type":"ContainerDied","Data":"97c187f67fcfefa943604ef4a265510e6fb2b145e038d78a1cabd98d9773e1de"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.906049 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-nrr6k" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.908408 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bf1c4a85-458f-4412-ae77-af6d87370b62","Type":"ContainerDied","Data":"5be570946029cf7c41009a06d35d671773276440570d4369de14f9fa8279aa60"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.908467 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.913747 4922 generic.go:334] "Generic (PLEG): container finished" podID="3ead89b5-3aff-47b9-9516-0eaa33dca7aa" containerID="52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" exitCode=0 Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.913824 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3ead89b5-3aff-47b9-9516-0eaa33dca7aa","Type":"ContainerDied","Data":"52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.913849 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3ead89b5-3aff-47b9-9516-0eaa33dca7aa","Type":"ContainerDied","Data":"98f17d11639c7e732de438cad7f9c3c5e13182daad0a9507918d564a973c72f4"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.913905 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.916320 4922 generic.go:334] "Generic (PLEG): container finished" podID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" exitCode=0 Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.916475 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0a5d1af0-eb7d-46ad-b4f1-eceb10445896","Type":"ContainerDied","Data":"03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.916524 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0a5d1af0-eb7d-46ad-b4f1-eceb10445896","Type":"ContainerDied","Data":"1f236449c45f35491ce65fd93e932ac6e2fc60b779e62e134d7470e1d015108f"} Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.916569 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.925472 4922 scope.go:117] "RemoveContainer" containerID="a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.948356 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z"] Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.955569 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-8bfd4f7f6-rvh6z"] Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.975264 4922 scope.go:117] "RemoveContainer" containerID="3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13" Sep 29 22:50:23 crc kubenswrapper[4922]: E0929 22:50:23.993677 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13\": container with ID starting with 3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13 not found: ID does not exist" containerID="3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.993744 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13"} err="failed to get container status \"3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13\": rpc error: code = NotFound desc = could not find container \"3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13\": container with ID starting with 3dfd3a5faa37dd685f5d7911720c1b775690d6ce62e74ae8e6ec7366f0732f13 not found: ID does not exist" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.993778 4922 scope.go:117] "RemoveContainer" containerID="a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.993897 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-8f78686f5-pj8pr"] Sep 29 22:50:23 crc kubenswrapper[4922]: E0929 22:50:23.994609 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397\": container with ID starting with 
a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397 not found: ID does not exist" containerID="a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.994639 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397"} err="failed to get container status \"a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397\": rpc error: code = NotFound desc = could not find container \"a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397\": container with ID starting with a63081ca96099e287600ec664d1b7dffb2608f76c10a55dd4bef64186bff8397 not found: ID does not exist" Sep 29 22:50:23 crc kubenswrapper[4922]: I0929 22:50:23.994659 4922 scope.go:117] "RemoveContainer" containerID="71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.005593 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-8f78686f5-pj8pr"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.010959 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-66cbdc5bdb-sfmk4"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.017787 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-66cbdc5bdb-sfmk4"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.026361 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.032606 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.044033 4922 scope.go:117] "RemoveContainer" containerID="8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.045159 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.053086 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.058535 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-nrr6k"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.063002 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-nrr6k"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.067881 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.074418 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.074708 4922 scope.go:117] "RemoveContainer" containerID="71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932" Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.075075 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932\": container with ID starting with 71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932 not found: ID does not exist" containerID="71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 
22:50:24.075110 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932"} err="failed to get container status \"71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932\": rpc error: code = NotFound desc = could not find container \"71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932\": container with ID starting with 71684fde57b1547e764d29e15593758553cb40f48654d15712c34c8819ae0932 not found: ID does not exist" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.075129 4922 scope.go:117] "RemoveContainer" containerID="8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9" Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.075508 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9\": container with ID starting with 8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9 not found: ID does not exist" containerID="8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.075550 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9"} err="failed to get container status \"8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9\": rpc error: code = NotFound desc = could not find container \"8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9\": container with ID starting with 8a6a6ed13e2bf81ac408d584ebbf7fad83f5127299de978a0418896fb48e82d9 not found: ID does not exist" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.075580 4922 scope.go:117] "RemoveContainer" containerID="f71b4f675a06896c34fd4e0be64b4fe734c8451e3903876095b9c46d095cd09a" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.105896 4922 scope.go:117] "RemoveContainer" containerID="d32993d34e081cdec7334222d057489baf75c3258521cbb12f048b68fcbd008d" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.129957 4922 scope.go:117] "RemoveContainer" containerID="f2294de9bd698e817730f095cb2af7a0db1eeff25afde1767c5f409c1a682198" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.151704 4922 scope.go:117] "RemoveContainer" containerID="52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.172512 4922 scope.go:117] "RemoveContainer" containerID="52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.173164 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c\": container with ID starting with 52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c not found: ID does not exist" containerID="52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.173197 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c"} err="failed to get container status \"52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c\": rpc error: code = NotFound desc = could not find container 
\"52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c\": container with ID starting with 52f12a5dce6a8d740b9688a365ca59174d180b01b82be7351467c71800853f1c not found: ID does not exist" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.173220 4922 scope.go:117] "RemoveContainer" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.197535 4922 scope.go:117] "RemoveContainer" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.197950 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18\": container with ID starting with 03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18 not found: ID does not exist" containerID="03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.197979 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18"} err="failed to get container status \"03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18\": rpc error: code = NotFound desc = could not find container \"03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18\": container with ID starting with 03cd1d2e4e89fc80872f2382fa5388d79addfefa245e833c2a68e0f1d9d3ed18 not found: ID does not exist" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.430229 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="014a5aba-d41a-4647-8459-c770534a4a60" path="/var/lib/kubelet/pods/014a5aba-d41a-4647-8459-c770534a4a60/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.430780 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" path="/var/lib/kubelet/pods/0a5d1af0-eb7d-46ad-b4f1-eceb10445896/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.431208 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19fc05a2-d210-4c05-8341-eafdbcc40dc1" path="/var/lib/kubelet/pods/19fc05a2-d210-4c05-8341-eafdbcc40dc1/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.432123 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="217b822b-44c6-465e-982a-23fa07d94b58" path="/var/lib/kubelet/pods/217b822b-44c6-465e-982a-23fa07d94b58/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.432605 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ead89b5-3aff-47b9-9516-0eaa33dca7aa" path="/var/lib/kubelet/pods/3ead89b5-3aff-47b9-9516-0eaa33dca7aa/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.433094 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" path="/var/lib/kubelet/pods/4799fcf9-24e7-4c61-9e5e-109105ec7003/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.434063 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98ffad34-9721-4849-84ba-f14c518250ac" path="/var/lib/kubelet/pods/98ffad34-9721-4849-84ba-f14c518250ac/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.434567 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf1c4a85-458f-4412-ae77-af6d87370b62" 
path="/var/lib/kubelet/pods/bf1c4a85-458f-4412-ae77-af6d87370b62/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: I0929 22:50:24.435199 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" path="/var/lib/kubelet/pods/e56d31de-64f5-42a7-8243-7ac6d992a03d/volumes" Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.649472 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.650052 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.650625 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.650678 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.652519 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.661417 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.663562 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:24 crc kubenswrapper[4922]: E0929 22:50:24.663650 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:50:25 crc kubenswrapper[4922]: I0929 22:50:25.951503 4922 generic.go:334] "Generic (PLEG): container finished" podID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerID="bcdb4f9a717ab87bcdb1ad12cb92702bad850a19ba4db942f610381ad13eb2b3" exitCode=0 Sep 29 22:50:25 crc kubenswrapper[4922]: I0929 22:50:25.951562 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerDied","Data":"bcdb4f9a717ab87bcdb1ad12cb92702bad850a19ba4db942f610381ad13eb2b3"} Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.014446 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.075779 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-sg-core-conf-yaml\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.075838 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-combined-ca-bundle\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.075902 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-log-httpd\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.075957 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-run-httpd\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.076036 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-ceilometer-tls-certs\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.076078 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-scripts\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.076118 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqg8k\" (UniqueName: \"kubernetes.io/projected/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-kube-api-access-zqg8k\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.076137 4922 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-config-data\") pod \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\" (UID: \"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031\") " Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.076429 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.076493 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.080853 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-kube-api-access-zqg8k" (OuterVolumeSpecName: "kube-api-access-zqg8k") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "kube-api-access-zqg8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.093120 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-scripts" (OuterVolumeSpecName: "scripts") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.110280 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.123895 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.163739 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.178028 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.178062 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqg8k\" (UniqueName: \"kubernetes.io/projected/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-kube-api-access-zqg8k\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.178073 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.178082 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.178091 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.178100 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.178108 4922 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.198994 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-config-data" (OuterVolumeSpecName: "config-data") pod "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" (UID: "0617fd51-e6f8-4cb7-8b63-cadf8ddaf031"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.291437 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.978810 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0617fd51-e6f8-4cb7-8b63-cadf8ddaf031","Type":"ContainerDied","Data":"5f0ee15ea3a4662f650cdb9075066dfcfb6cc301c07c2ae3eac8ec7f566de08a"} Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.978868 4922 scope.go:117] "RemoveContainer" containerID="cae850c08219cac4ce375335d50b6de2b3c7fb4df5328a2868d0238ce2244141" Sep 29 22:50:26 crc kubenswrapper[4922]: I0929 22:50:26.979078 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 22:50:27 crc kubenswrapper[4922]: I0929 22:50:27.023206 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:50:27 crc kubenswrapper[4922]: I0929 22:50:27.026939 4922 scope.go:117] "RemoveContainer" containerID="ce05b6489484a12ed071a747de16f8ec141d68489e436390d82db8647d849b69" Sep 29 22:50:27 crc kubenswrapper[4922]: I0929 22:50:27.037841 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 22:50:27 crc kubenswrapper[4922]: I0929 22:50:27.052625 4922 scope.go:117] "RemoveContainer" containerID="bcdb4f9a717ab87bcdb1ad12cb92702bad850a19ba4db942f610381ad13eb2b3" Sep 29 22:50:27 crc kubenswrapper[4922]: I0929 22:50:27.088284 4922 scope.go:117] "RemoveContainer" containerID="13fd77bc452c12f556dd75f45bf37781a1403c31edf646ab5a140812ff690364" Sep 29 22:50:28 crc kubenswrapper[4922]: I0929 22:50:28.438312 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" path="/var/lib/kubelet/pods/0617fd51-e6f8-4cb7-8b63-cadf8ddaf031/volumes" Sep 29 22:50:28 crc kubenswrapper[4922]: I0929 22:50:28.913171 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:50:28 crc kubenswrapper[4922]: I0929 22:50:28.913264 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.650085 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.650453 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.650983 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.651027 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.652614 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.654785 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.656912 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:29 crc kubenswrapper[4922]: E0929 22:50:29.656999 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.649971 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.650947 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.651447 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.651529 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container 
process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.652193 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.654750 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.658318 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:34 crc kubenswrapper[4922]: E0929 22:50:34.658370 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.106583 4922 generic.go:334] "Generic (PLEG): container finished" podID="cb84f99c-6d00-4023-9520-372992f3646e" containerID="ee6bfbd535ddb2568b60ca0420863ffa93c242860084bfd9305faf5ae6f7c154" exitCode=0 Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.106670 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-656896d5d5-fczbx" event={"ID":"cb84f99c-6d00-4023-9520-372992f3646e","Type":"ContainerDied","Data":"ee6bfbd535ddb2568b60ca0420863ffa93c242860084bfd9305faf5ae6f7c154"} Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.339299 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.525315 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkfjq\" (UniqueName: \"kubernetes.io/projected/cb84f99c-6d00-4023-9520-372992f3646e-kube-api-access-qkfjq\") pod \"cb84f99c-6d00-4023-9520-372992f3646e\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.525597 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-ovndb-tls-certs\") pod \"cb84f99c-6d00-4023-9520-372992f3646e\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.525672 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-config\") pod \"cb84f99c-6d00-4023-9520-372992f3646e\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.525727 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-public-tls-certs\") pod \"cb84f99c-6d00-4023-9520-372992f3646e\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.525800 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-internal-tls-certs\") pod \"cb84f99c-6d00-4023-9520-372992f3646e\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.526047 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-combined-ca-bundle\") pod \"cb84f99c-6d00-4023-9520-372992f3646e\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.526112 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-httpd-config\") pod \"cb84f99c-6d00-4023-9520-372992f3646e\" (UID: \"cb84f99c-6d00-4023-9520-372992f3646e\") " Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.533875 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "cb84f99c-6d00-4023-9520-372992f3646e" (UID: "cb84f99c-6d00-4023-9520-372992f3646e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.546857 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb84f99c-6d00-4023-9520-372992f3646e-kube-api-access-qkfjq" (OuterVolumeSpecName: "kube-api-access-qkfjq") pod "cb84f99c-6d00-4023-9520-372992f3646e" (UID: "cb84f99c-6d00-4023-9520-372992f3646e"). InnerVolumeSpecName "kube-api-access-qkfjq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.587275 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb84f99c-6d00-4023-9520-372992f3646e" (UID: "cb84f99c-6d00-4023-9520-372992f3646e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.606453 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cb84f99c-6d00-4023-9520-372992f3646e" (UID: "cb84f99c-6d00-4023-9520-372992f3646e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.607119 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cb84f99c-6d00-4023-9520-372992f3646e" (UID: "cb84f99c-6d00-4023-9520-372992f3646e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.608704 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-config" (OuterVolumeSpecName: "config") pod "cb84f99c-6d00-4023-9520-372992f3646e" (UID: "cb84f99c-6d00-4023-9520-372992f3646e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.628559 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkfjq\" (UniqueName: \"kubernetes.io/projected/cb84f99c-6d00-4023-9520-372992f3646e-kube-api-access-qkfjq\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.628609 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.628635 4922 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.628654 4922 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.628673 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.628691 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.638841 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "cb84f99c-6d00-4023-9520-372992f3646e" (UID: "cb84f99c-6d00-4023-9520-372992f3646e"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 22:50:37 crc kubenswrapper[4922]: I0929 22:50:37.729846 4922 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb84f99c-6d00-4023-9520-372992f3646e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:38 crc kubenswrapper[4922]: I0929 22:50:38.123464 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-656896d5d5-fczbx" event={"ID":"cb84f99c-6d00-4023-9520-372992f3646e","Type":"ContainerDied","Data":"a097d1c2ebd139ad62d98f468d014014923cb761c3954ba9d5533a33b1219c43"} Sep 29 22:50:38 crc kubenswrapper[4922]: I0929 22:50:38.123623 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-656896d5d5-fczbx" Sep 29 22:50:38 crc kubenswrapper[4922]: I0929 22:50:38.124552 4922 scope.go:117] "RemoveContainer" containerID="0cb082da33df2e4d81994a52b8d0e177856277b29b39509b8cec8831f4d69eb3" Sep 29 22:50:38 crc kubenswrapper[4922]: I0929 22:50:38.165650 4922 scope.go:117] "RemoveContainer" containerID="ee6bfbd535ddb2568b60ca0420863ffa93c242860084bfd9305faf5ae6f7c154" Sep 29 22:50:38 crc kubenswrapper[4922]: I0929 22:50:38.186530 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-656896d5d5-fczbx"] Sep 29 22:50:38 crc kubenswrapper[4922]: I0929 22:50:38.197385 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-656896d5d5-fczbx"] Sep 29 22:50:38 crc kubenswrapper[4922]: I0929 22:50:38.438687 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb84f99c-6d00-4023-9520-372992f3646e" path="/var/lib/kubelet/pods/cb84f99c-6d00-4023-9520-372992f3646e/volumes" Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.649198 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.650105 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.650788 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.650853 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = 
container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.651110 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.652659 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.654280 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:39 crc kubenswrapper[4922]: E0929 22:50:39.654331 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.649706 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.651643 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.651711 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.652124 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: 
container process not found" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.652168 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.653972 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.655977 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Sep 29 22:50:44 crc kubenswrapper[4922]: E0929 22:50:44.656044 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-4jkkx" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.001179 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4jkkx_4594a140-3321-4a34-ab35-65ad3560b085/ovs-vswitchd/0.log" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.002412 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.171710 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-log\") pod \"4594a140-3321-4a34-ab35-65ad3560b085\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.171767 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-lib\") pod \"4594a140-3321-4a34-ab35-65ad3560b085\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.171838 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-run\") pod \"4594a140-3321-4a34-ab35-65ad3560b085\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.171893 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4594a140-3321-4a34-ab35-65ad3560b085-scripts\") pod \"4594a140-3321-4a34-ab35-65ad3560b085\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.171953 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-etc-ovs\") pod \"4594a140-3321-4a34-ab35-65ad3560b085\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.171973 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmghh\" (UniqueName: \"kubernetes.io/projected/4594a140-3321-4a34-ab35-65ad3560b085-kube-api-access-zmghh\") pod \"4594a140-3321-4a34-ab35-65ad3560b085\" (UID: \"4594a140-3321-4a34-ab35-65ad3560b085\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.172405 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-run" (OuterVolumeSpecName: "var-run") pod "4594a140-3321-4a34-ab35-65ad3560b085" (UID: "4594a140-3321-4a34-ab35-65ad3560b085"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.172439 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-log" (OuterVolumeSpecName: "var-log") pod "4594a140-3321-4a34-ab35-65ad3560b085" (UID: "4594a140-3321-4a34-ab35-65ad3560b085"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.172529 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "4594a140-3321-4a34-ab35-65ad3560b085" (UID: "4594a140-3321-4a34-ab35-65ad3560b085"). InnerVolumeSpecName "etc-ovs". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.172532 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-lib" (OuterVolumeSpecName: "var-lib") pod "4594a140-3321-4a34-ab35-65ad3560b085" (UID: "4594a140-3321-4a34-ab35-65ad3560b085"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.173940 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4594a140-3321-4a34-ab35-65ad3560b085-scripts" (OuterVolumeSpecName: "scripts") pod "4594a140-3321-4a34-ab35-65ad3560b085" (UID: "4594a140-3321-4a34-ab35-65ad3560b085"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.181373 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4594a140-3321-4a34-ab35-65ad3560b085-kube-api-access-zmghh" (OuterVolumeSpecName: "kube-api-access-zmghh") pod "4594a140-3321-4a34-ab35-65ad3560b085" (UID: "4594a140-3321-4a34-ab35-65ad3560b085"). InnerVolumeSpecName "kube-api-access-zmghh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.240373 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerID="d96b9721a809407b59045e31403c469338494e50e97df48dd1a0aa74503cb5bd" exitCode=137 Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.240416 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"d96b9721a809407b59045e31403c469338494e50e97df48dd1a0aa74503cb5bd"} Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.242025 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-4jkkx_4594a140-3321-4a34-ab35-65ad3560b085/ovs-vswitchd/0.log" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.242698 4922 generic.go:334] "Generic (PLEG): container finished" podID="4594a140-3321-4a34-ab35-65ad3560b085" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" exitCode=137 Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.242725 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerDied","Data":"cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c"} Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.242758 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-4jkkx" event={"ID":"4594a140-3321-4a34-ab35-65ad3560b085","Type":"ContainerDied","Data":"591d8a3bc54f16d3df0a65cdc361508792038fcbd0b26e36f247cab59f266e9b"} Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.242760 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-4jkkx" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.242772 4922 scope.go:117] "RemoveContainer" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.273452 4922 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-etc-ovs\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.273492 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmghh\" (UniqueName: \"kubernetes.io/projected/4594a140-3321-4a34-ab35-65ad3560b085-kube-api-access-zmghh\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.273505 4922 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-log\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.273519 4922 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-lib\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.273531 4922 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4594a140-3321-4a34-ab35-65ad3560b085-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.273542 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4594a140-3321-4a34-ab35-65ad3560b085-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.276792 4922 scope.go:117] "RemoveContainer" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.279315 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-4jkkx"] Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.292565 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-4jkkx"] Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.303135 4922 scope.go:117] "RemoveContainer" containerID="0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.340972 4922 scope.go:117] "RemoveContainer" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" Sep 29 22:50:46 crc kubenswrapper[4922]: E0929 22:50:46.341533 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c\": container with ID starting with cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c not found: ID does not exist" containerID="cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.341599 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c"} err="failed to get container status \"cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c\": rpc error: code = NotFound desc = could not find container 
\"cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c\": container with ID starting with cd4572c6c6a57a490542b1a87103fce64cbbea689bd83bd4e39682564c31528c not found: ID does not exist" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.341632 4922 scope.go:117] "RemoveContainer" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" Sep 29 22:50:46 crc kubenswrapper[4922]: E0929 22:50:46.342433 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a\": container with ID starting with 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a not found: ID does not exist" containerID="2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.342494 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a"} err="failed to get container status \"2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a\": rpc error: code = NotFound desc = could not find container \"2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a\": container with ID starting with 2ee8395b6b60b388b70d31e4fb187a15364b8fdf8b97d3bcaa0b548261b1813a not found: ID does not exist" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.342524 4922 scope.go:117] "RemoveContainer" containerID="0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f" Sep 29 22:50:46 crc kubenswrapper[4922]: E0929 22:50:46.343075 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f\": container with ID starting with 0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f not found: ID does not exist" containerID="0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.343106 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f"} err="failed to get container status \"0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f\": rpc error: code = NotFound desc = could not find container \"0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f\": container with ID starting with 0003914d6dd8b6d4d404efeed34c6146613d1d68abb3234dc623c357f81a4c2f not found: ID does not exist" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.446951 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4594a140-3321-4a34-ab35-65ad3560b085" path="/var/lib/kubelet/pods/4594a140-3321-4a34-ab35-65ad3560b085/volumes" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.471074 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.580663 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") pod \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.580742 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zjbb\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-kube-api-access-7zjbb\") pod \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.580819 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-lock\") pod \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.580848 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.580881 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-cache\") pod \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\" (UID: \"e0b8c219-afd8-41e5-a9d7-686c7b70fd70\") " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.581656 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-cache" (OuterVolumeSpecName: "cache") pod "e0b8c219-afd8-41e5-a9d7-686c7b70fd70" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.581690 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-lock" (OuterVolumeSpecName: "lock") pod "e0b8c219-afd8-41e5-a9d7-686c7b70fd70" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.586148 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e0b8c219-afd8-41e5-a9d7-686c7b70fd70" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.586243 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-kube-api-access-7zjbb" (OuterVolumeSpecName: "kube-api-access-7zjbb") pod "e0b8c219-afd8-41e5-a9d7-686c7b70fd70" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70"). InnerVolumeSpecName "kube-api-access-7zjbb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.592765 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "swift") pod "e0b8c219-afd8-41e5-a9d7-686c7b70fd70" (UID: "e0b8c219-afd8-41e5-a9d7-686c7b70fd70"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.682668 4922 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.682998 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zjbb\" (UniqueName: \"kubernetes.io/projected/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-kube-api-access-7zjbb\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.683016 4922 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-lock\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.683066 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.683084 4922 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e0b8c219-afd8-41e5-a9d7-686c7b70fd70-cache\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.721464 4922 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 29 22:50:46 crc kubenswrapper[4922]: I0929 22:50:46.784756 4922 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.258488 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e0b8c219-afd8-41e5-a9d7-686c7b70fd70","Type":"ContainerDied","Data":"363e32a531e3f6bd3048514b38ed84553dc6087d4cb48d0b444b45d8e462c56f"} Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.258594 4922 scope.go:117] "RemoveContainer" containerID="d96b9721a809407b59045e31403c469338494e50e97df48dd1a0aa74503cb5bd" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.258648 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.306977 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.309107 4922 scope.go:117] "RemoveContainer" containerID="27aeadd45b13c851d87c45f05a21adf10459ae93d03fae69b6ab3347a3cd7d2b" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.315516 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.350915 4922 scope.go:117] "RemoveContainer" containerID="9c2b949ae2010cd19044ec6c16936ffd099b6ea65673b3704a021c3323514b40" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.384874 4922 scope.go:117] "RemoveContainer" containerID="e41c7b951d5523f493d3e44c422eb2f476674ac694ba6e3a443bb314f8068bab" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.418216 4922 scope.go:117] "RemoveContainer" containerID="1f217165de12b63c91e1fbd871ad07d3070b8407a3d9750bde397f3c7a1cc356" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.451324 4922 scope.go:117] "RemoveContainer" containerID="7dc1ecd4e9d792ad830b6b3cddec0aca87a6fc32dfad2067e4fa602b228af523" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.484173 4922 scope.go:117] "RemoveContainer" containerID="6c064216482398df313773fb9964e1b8586650597558efb0a3a312e7dde29596" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.538330 4922 scope.go:117] "RemoveContainer" containerID="54464affff56c245302f16939d7871865704b43ae97eb183cd35b66f93385f35" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.569441 4922 scope.go:117] "RemoveContainer" containerID="aed3f79c8434a0f0105df5fe72412ca9bc5f53d2f122d6b27023c5a8f5c61342" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.602797 4922 scope.go:117] "RemoveContainer" containerID="e249fe7a191a944cff40c8c92e3c4958f89cf9fbd1f5d1322ff75e0f69defdff" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.634899 4922 scope.go:117] "RemoveContainer" containerID="4675d2c0679cc4f58a6d8737c63a65ad973c3433c64759dcea3d5deff22e30fb" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.673554 4922 scope.go:117] "RemoveContainer" containerID="f166640120faeaa707308969f390573411f97a3309e54ac63df05aebb3f19824" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.718403 4922 scope.go:117] "RemoveContainer" containerID="394ac56913d9a5c9d5e8f0211780ebf922fd0554782e59a3d6d87d16da29195d" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.759763 4922 scope.go:117] "RemoveContainer" containerID="d2f084fa2f64aff150659598e27fe358fc89e0c61c6100a7520978fcf0f7a916" Sep 29 22:50:47 crc kubenswrapper[4922]: I0929 22:50:47.796472 4922 scope.go:117] "RemoveContainer" containerID="6e52d87702c312bbb2e29a490519b8aa109bb12950e8b0a94d326f1b63f93999" Sep 29 22:50:48 crc kubenswrapper[4922]: I0929 22:50:48.438701 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" path="/var/lib/kubelet/pods/e0b8c219-afd8-41e5-a9d7-686c7b70fd70/volumes" Sep 29 22:50:58 crc kubenswrapper[4922]: I0929 22:50:58.913289 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:50:58 crc kubenswrapper[4922]: I0929 22:50:58.913721 4922 prober.go:107] 
"Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:50:58 crc kubenswrapper[4922]: I0929 22:50:58.913799 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:50:58 crc kubenswrapper[4922]: I0929 22:50:58.914754 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"832e8949f1bfe9c9884bbbe72e8107f9a55a105f03d2155b45dfe20f0e514d26"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:50:58 crc kubenswrapper[4922]: I0929 22:50:58.914864 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://832e8949f1bfe9c9884bbbe72e8107f9a55a105f03d2155b45dfe20f0e514d26" gracePeriod=600 Sep 29 22:50:59 crc kubenswrapper[4922]: I0929 22:50:59.393143 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="832e8949f1bfe9c9884bbbe72e8107f9a55a105f03d2155b45dfe20f0e514d26" exitCode=0 Sep 29 22:50:59 crc kubenswrapper[4922]: I0929 22:50:59.393452 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"832e8949f1bfe9c9884bbbe72e8107f9a55a105f03d2155b45dfe20f0e514d26"} Sep 29 22:50:59 crc kubenswrapper[4922]: I0929 22:50:59.393915 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d"} Sep 29 22:50:59 crc kubenswrapper[4922]: I0929 22:50:59.393976 4922 scope.go:117] "RemoveContainer" containerID="2744d35a0efae3434bd56ec391b0648d3824ba8565228dfe9d3610ca7ee648f3" Sep 29 22:51:47 crc kubenswrapper[4922]: I0929 22:51:47.977105 4922 scope.go:117] "RemoveContainer" containerID="73abdf3beef709c2dc64e99c4195dc633f8c2a0e3781acbe2e75749f2f9eeffc" Sep 29 22:51:48 crc kubenswrapper[4922]: I0929 22:51:48.013033 4922 scope.go:117] "RemoveContainer" containerID="5dee3f93cfad466ddb0c5b6ec802c83864fdd84bc637dd69a34af508bfc25b2d" Sep 29 22:51:48 crc kubenswrapper[4922]: I0929 22:51:48.059374 4922 scope.go:117] "RemoveContainer" containerID="0f7068f056c410853b677deba5127eb7577a738fac825eb4b239d2f1371200c6" Sep 29 22:51:48 crc kubenswrapper[4922]: I0929 22:51:48.088745 4922 scope.go:117] "RemoveContainer" containerID="15442b286cfb918d0b410822fb8fe774891b5bb95ac0d45341c5a3d2baaa7d1c" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.226990 4922 scope.go:117] "RemoveContainer" containerID="fb91a14d5de3eb8dcd551bdc88c45ee6cbe872536cded7b69dbe38d418e4ae80" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.307760 4922 scope.go:117] "RemoveContainer" containerID="df2e60aab7e3d8ae2a09de0155bb6512d8a5fcc3b4d52f7a29d46d18974c062f" Sep 29 22:52:48 crc 
kubenswrapper[4922]: I0929 22:52:48.344776 4922 scope.go:117] "RemoveContainer" containerID="6a35ad12e5e174b24a23fdc83a0dc7b755b27cbac17636e646224c0b043489ff" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.378691 4922 scope.go:117] "RemoveContainer" containerID="7a4f35c9026c3a753c4df2911e4a273ed8f0fd7ea9968c172667d0dad986cc9e" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.423913 4922 scope.go:117] "RemoveContainer" containerID="09ecdc04d13e062d78a3247657034bae626cf7a3bb7d11f959a60cb773312999" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.455131 4922 scope.go:117] "RemoveContainer" containerID="08bf9bfd078c072ef33d724c9ee636df566a2fd59a2f592dc6c73284ee0025ac" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.480504 4922 scope.go:117] "RemoveContainer" containerID="57ee8be2a2838ea0f44079b410352c4c9e3e3715c82c96b4a7b5124eccabbe06" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.501778 4922 scope.go:117] "RemoveContainer" containerID="4e669d9dab7924ffde5ba905dfed402d643a8bfbec30bede0e88432df10b3f2b" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.536868 4922 scope.go:117] "RemoveContainer" containerID="c372429d359f29b5fb62f37582e449b4609c3a2ef89b7a71ebeb759be5b01361" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.558253 4922 scope.go:117] "RemoveContainer" containerID="0bedfca35eca2b938823e8ebdd743ac692fe48cb5465b4698b6f9f2ae56de9be" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.593829 4922 scope.go:117] "RemoveContainer" containerID="f53c7b3b9f62eb3961e0c29dd8e4620868ae009185345b663011a716c95d6ad7" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.623721 4922 scope.go:117] "RemoveContainer" containerID="7c2326df9a7a292c4ca8fe772bd0618a690c2b040399447a26c66ca42613a28c" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.708146 4922 scope.go:117] "RemoveContainer" containerID="92e45f2986b77c1ac0d506bd56133b91677b767c782a7f4355197ce4f97ee6fc" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.734595 4922 scope.go:117] "RemoveContainer" containerID="b137252b3fcf80b102a7512521912ab1e7489cb1db512f4c41d4510733941949" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.797563 4922 scope.go:117] "RemoveContainer" containerID="9c6ded9f84dde5456d83aa657b28ed7e35c6018741c5d3cd569e5fede1321c6c" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.826573 4922 scope.go:117] "RemoveContainer" containerID="b7791e3489be4c7ce6ecd6c1df17b8120899bfaa7aeb1826665536cc3f326419" Sep 29 22:52:48 crc kubenswrapper[4922]: I0929 22:52:48.856410 4922 scope.go:117] "RemoveContainer" containerID="a8fb0d82dbf0af1564f72619d645eb92b967e7d87ae89691bd4dfff51a6e825b" Sep 29 22:53:28 crc kubenswrapper[4922]: I0929 22:53:28.913118 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:53:28 crc kubenswrapper[4922]: I0929 22:53:28.913901 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:53:49 crc kubenswrapper[4922]: I0929 22:53:49.211302 4922 scope.go:117] "RemoveContainer" 
containerID="1e876adec5484e05b077c96b5b512d17c8e7fd01cafe21209613ed2494bca1f4" Sep 29 22:53:49 crc kubenswrapper[4922]: I0929 22:53:49.238767 4922 scope.go:117] "RemoveContainer" containerID="a6f1bb89e5078fd01351e172f87b236110f6a224af2cfb4d3a86fea04668651f" Sep 29 22:53:49 crc kubenswrapper[4922]: I0929 22:53:49.281783 4922 scope.go:117] "RemoveContainer" containerID="ee663f58dc68ad4963e1dfe7771f40417aed4b76cbeb0f64725d0d1c76da2b25" Sep 29 22:53:49 crc kubenswrapper[4922]: I0929 22:53:49.302338 4922 scope.go:117] "RemoveContainer" containerID="b81b755bcfce346f5f3e40095ef94c2f2a4850fd6853f4d78e6341fd3e03fa1a" Sep 29 22:53:49 crc kubenswrapper[4922]: I0929 22:53:49.354854 4922 scope.go:117] "RemoveContainer" containerID="8003474609ae65034e3d0969aa7d08f7334d96674fed267f7823a071e990171f" Sep 29 22:53:49 crc kubenswrapper[4922]: I0929 22:53:49.408927 4922 scope.go:117] "RemoveContainer" containerID="b0601e07e1cda9d47e12b57216a40dab9a4a196e2dbdbe101c52408ab8dbc673" Sep 29 22:53:49 crc kubenswrapper[4922]: I0929 22:53:49.437112 4922 scope.go:117] "RemoveContainer" containerID="7a62e125a44ebba0de06b0bbdee90bdf4a8ec082363e31330dd4e71e609bef7f" Sep 29 22:53:58 crc kubenswrapper[4922]: I0929 22:53:58.912893 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:53:58 crc kubenswrapper[4922]: I0929 22:53:58.913539 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:54:28 crc kubenswrapper[4922]: I0929 22:54:28.913275 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 22:54:28 crc kubenswrapper[4922]: I0929 22:54:28.913996 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 22:54:28 crc kubenswrapper[4922]: I0929 22:54:28.914063 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 22:54:28 crc kubenswrapper[4922]: I0929 22:54:28.914933 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 22:54:28 crc kubenswrapper[4922]: I0929 22:54:28.915030 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" 
containerName="machine-config-daemon" containerID="cri-o://1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" gracePeriod=600 Sep 29 22:54:29 crc kubenswrapper[4922]: E0929 22:54:29.047072 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:54:29 crc kubenswrapper[4922]: I0929 22:54:29.619568 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" exitCode=0 Sep 29 22:54:29 crc kubenswrapper[4922]: I0929 22:54:29.619635 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d"} Sep 29 22:54:29 crc kubenswrapper[4922]: I0929 22:54:29.620028 4922 scope.go:117] "RemoveContainer" containerID="832e8949f1bfe9c9884bbbe72e8107f9a55a105f03d2155b45dfe20f0e514d26" Sep 29 22:54:29 crc kubenswrapper[4922]: I0929 22:54:29.621193 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:54:29 crc kubenswrapper[4922]: E0929 22:54:29.621834 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:54:43 crc kubenswrapper[4922]: I0929 22:54:43.421822 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:54:43 crc kubenswrapper[4922]: E0929 22:54:43.423034 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:54:49 crc kubenswrapper[4922]: I0929 22:54:49.597265 4922 scope.go:117] "RemoveContainer" containerID="48508a82337e926742cebbd69b7faec4f8934755f4c3a2de0b0d1596050a5f5a" Sep 29 22:54:49 crc kubenswrapper[4922]: I0929 22:54:49.626658 4922 scope.go:117] "RemoveContainer" containerID="0905da41990a3b6e384c72d2a0386ef4d77931396bc0710a2e410cb7138434b4" Sep 29 22:54:49 crc kubenswrapper[4922]: I0929 22:54:49.657610 4922 scope.go:117] "RemoveContainer" containerID="4ac9e5d78eb21ed1711732f74e2d50ed5359b5fd04777f879f901b6d931572dc" Sep 29 22:54:49 crc kubenswrapper[4922]: I0929 22:54:49.719123 4922 scope.go:117] "RemoveContainer" containerID="8a4f1a17cc7281e316fc945ca04d33391147a434777cd42f9f8fecaa94adbeb0" Sep 29 22:54:49 crc kubenswrapper[4922]: I0929 22:54:49.781752 4922 scope.go:117] 
"RemoveContainer" containerID="0eefb0b1bba7e0faa2f93c51c1ba92a9d77c634ccf72229caec29707da0e3782" Sep 29 22:54:49 crc kubenswrapper[4922]: I0929 22:54:49.821984 4922 scope.go:117] "RemoveContainer" containerID="36936cb1aa467c910ae432fb924d1475da2b8ee186bceff2f106a66832818198" Sep 29 22:54:56 crc kubenswrapper[4922]: I0929 22:54:56.431252 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:54:56 crc kubenswrapper[4922]: E0929 22:54:56.433689 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:55:10 crc kubenswrapper[4922]: I0929 22:55:10.423547 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:55:10 crc kubenswrapper[4922]: E0929 22:55:10.424643 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:55:21 crc kubenswrapper[4922]: I0929 22:55:21.422525 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:55:21 crc kubenswrapper[4922]: E0929 22:55:21.423719 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:55:34 crc kubenswrapper[4922]: I0929 22:55:34.422890 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:55:34 crc kubenswrapper[4922]: E0929 22:55:34.423665 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.289496 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bw9cs"] Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290522 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-metadata" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290549 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-metadata" Sep 29 
22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290572 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerName="mysql-bootstrap" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290588 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerName="mysql-bootstrap" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290638 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerName="dnsmasq-dns" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290658 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerName="dnsmasq-dns" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290681 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" containerName="nova-cell0-conductor-conductor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290697 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" containerName="nova-cell0-conductor-conductor" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290715 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server-init" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290730 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server-init" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290766 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="rsync" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290784 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="rsync" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290818 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0a467a9-053f-4f41-b6b2-529130d42122" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290834 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0a467a9-053f-4f41-b6b2-529130d42122" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290868 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290886 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290908 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290924 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290943 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-reaper" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.290959 4922 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-reaper" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.290990 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291005 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291020 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="probe" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291035 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="probe" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291058 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291075 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291094 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291110 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291128 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291143 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-server" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291158 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="sg-core" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291173 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="sg-core" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291190 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291209 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291235 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe7291a-aae6-4a8f-9f46-fa4594582dfe" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291250 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe7291a-aae6-4a8f-9f46-fa4594582dfe" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291274 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2c58690-3113-44b8-b2df-cbe69dbd26e3" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291289 4922 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="b2c58690-3113-44b8-b2df-cbe69dbd26e3" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291312 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291326 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291356 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291371 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291433 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="ovsdbserver-nb" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291451 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="ovsdbserver-nb" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291480 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerName="galera" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291496 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerName="galera" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291528 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291544 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-server" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291565 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291582 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291611 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerName="mysql-bootstrap" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291627 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerName="mysql-bootstrap" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291650 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291666 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291713 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291732 4922 
state_mem.go:107] "Deleted CPUSet assignment" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291759 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerName="setup-container" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291779 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerName="setup-container" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291807 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291823 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291845 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c5eef11-d4e0-43cd-b305-c427f85d173a" containerName="kube-state-metrics" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291862 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c5eef11-d4e0-43cd-b305-c427f85d173a" containerName="kube-state-metrics" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291884 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="ovsdbserver-sb" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291900 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="ovsdbserver-sb" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291928 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291943 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.291972 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.291989 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-api" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292021 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-updater" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292037 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-updater" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292056 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerName="rabbitmq" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292071 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerName="rabbitmq" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292085 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-auditor" Sep 29 22:55:44 crc 
kubenswrapper[4922]: I0929 22:55:44.292101 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292119 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf1c4a85-458f-4412-ae77-af6d87370b62" containerName="nova-cell1-conductor-conductor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292136 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf1c4a85-458f-4412-ae77-af6d87370b62" containerName="nova-cell1-conductor-conductor" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292155 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292171 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-server" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292194 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-central-agent" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292209 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-central-agent" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292230 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292246 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292274 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0082d9b7-4b81-47ca-8ba7-61429fdcc678" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292290 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0082d9b7-4b81-47ca-8ba7-61429fdcc678" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292310 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292322 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292343 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292355 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292373 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerName="setup-container" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292385 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerName="setup-container" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292442 4922 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292460 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292512 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292525 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292541 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292552 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292571 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="proxy-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292582 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="proxy-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292596 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292608 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292631 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292644 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292666 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1654e799-40ef-413a-8324-bb5b4f7a8f17" containerName="memcached" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292678 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1654e799-40ef-413a-8324-bb5b4f7a8f17" containerName="memcached" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292694 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292707 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-api" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292720 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerName="init" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292732 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerName="init" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292757 4922 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="3ead89b5-3aff-47b9-9516-0eaa33dca7aa" containerName="nova-scheduler-scheduler" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292769 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ead89b5-3aff-47b9-9516-0eaa33dca7aa" containerName="nova-scheduler-scheduler" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292790 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd700631-7b12-4e93-9e40-747b09623e7e" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292803 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd700631-7b12-4e93-9e40-747b09623e7e" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292817 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19fc05a2-d210-4c05-8341-eafdbcc40dc1" containerName="keystone-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292829 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="19fc05a2-d210-4c05-8341-eafdbcc40dc1" containerName="keystone-api" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292843 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292856 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292874 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerName="rabbitmq" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292910 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerName="rabbitmq" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292929 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-updater" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292941 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-updater" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292954 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292966 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-api" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.292979 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerName="galera" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.292990 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerName="galera" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293005 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-notification-agent" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293018 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-notification-agent" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293035 4922 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293046 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293063 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38d4f6b-b91d-4bb8-9e78-54261e6a285e" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293075 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38d4f6b-b91d-4bb8-9e78-54261e6a285e" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293095 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293107 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-server" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293129 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293140 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293157 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="swift-recon-cron" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293170 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="swift-recon-cron" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293190 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293202 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293219 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293231 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293244 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293256 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-log" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293276 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293288 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 
22:55:44.293305 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="cinder-scheduler" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293317 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="cinder-scheduler" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293337 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-expirer" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293349 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-expirer" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293370 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f689cf2-292c-47a9-936d-57954d187f5d" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293385 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f689cf2-292c-47a9-936d-57954d187f5d" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: E0929 22:55:44.293440 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="ovn-northd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293454 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="ovn-northd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293700 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293726 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293739 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2c58690-3113-44b8-b2df-cbe69dbd26e3" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293754 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf1c4a85-458f-4412-ae77-af6d87370b62" containerName="nova-cell1-conductor-conductor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293766 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="proxy-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293784 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfe7291a-aae6-4a8f-9f46-fa4594582dfe" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293811 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa85a019-83a6-4b71-abdb-7144be0105ae" containerName="galera" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293833 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovs-vswitchd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293852 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="014a5aba-d41a-4647-8459-c770534a4a60" containerName="ovn-northd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293871 4922 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cab5f5be-6bdd-481b-a07b-08491f6f2be5" containerName="rabbitmq" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293885 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-updater" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293908 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293923 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="cinder-scheduler" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293942 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293956 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="866ac5e5-219a-4afa-b6b3-0ca293c81f1d" containerName="galera" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293969 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.293987 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a5d1af0-eb7d-46ad-b4f1-eceb10445896" containerName="nova-cell0-conductor-conductor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294009 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e56d31de-64f5-42a7-8243-7ac6d992a03d" containerName="rabbitmq" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294022 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294042 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4594a140-3321-4a34-ab35-65ad3560b085" containerName="ovsdb-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294054 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294071 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ead89b5-3aff-47b9-9516-0eaa33dca7aa" containerName="nova-scheduler-scheduler" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294086 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0a467a9-053f-4f41-b6b2-529130d42122" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294104 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="swift-recon-cron" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294118 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294134 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd700631-7b12-4e93-9e40-747b09623e7e" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294150 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f689cf2-292c-47a9-936d-57954d187f5d" containerName="openstack-network-exporter" Sep 29 
22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294169 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294185 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294205 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-updater" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294223 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294239 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294254 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="rsync" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294270 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-replicator" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294286 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0082d9b7-4b81-47ca-8ba7-61429fdcc678" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294303 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a41c5e41-6db6-44dc-989d-d7a8ed8ae091" containerName="dnsmasq-dns" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294323 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294342 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="ovsdbserver-sb" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294358 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294372 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294386 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="391e4250-b978-4ce4-811d-ae2a81a8500f" containerName="nova-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294441 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="447099dc-1eea-4510-8b94-faa6899f6b06" containerName="glance-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294458 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-metadata" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294476 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-notification-agent" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294495 4922 
memory_manager.go:354] "RemoveStaleState removing state" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294508 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38d4f6b-b91d-4bb8-9e78-54261e6a285e" containerName="mariadb-account-delete" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294528 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-auditor" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294543 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="container-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294557 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8788e59c-0cd3-43c5-8591-d452f9cb083a" containerName="cinder-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294573 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e448a92-1d3a-4bf6-a7f4-dbedb7b829d3" containerName="barbican-api-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294587 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a7323e3-8b0e-4f74-b0f4-73c5874fe361" containerName="openstack-network-exporter" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294602 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="object-expirer" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294615 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4799fcf9-24e7-4c61-9e5e-109105ec7003" containerName="barbican-keystone-listener" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294630 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c5eef11-d4e0-43cd-b305-c427f85d173a" containerName="kube-state-metrics" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294645 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="51f81c86-8f6d-4506-a940-5015032df5bd" containerName="placement-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294660 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6e07568-09d3-4a0f-a3b6-8b6df0f89cbb" containerName="proxy-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294677 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="ceilometer-central-agent" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294691 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1654e799-40ef-413a-8324-bb5b4f7a8f17" containerName="memcached" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294706 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0617fd51-e6f8-4cb7-8b63-cadf8ddaf031" containerName="sg-core" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294723 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-server" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294739 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b8c219-afd8-41e5-a9d7-686c7b70fd70" containerName="account-reaper" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294756 4922 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294777 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="98ffad34-9721-4849-84ba-f14c518250ac" containerName="barbican-worker" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294791 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb84f99c-6d00-4023-9520-372992f3646e" containerName="neutron-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294805 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bce38540-6796-48b5-82e7-aad30cf98841" containerName="ovsdbserver-nb" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294820 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="19fc05a2-d210-4c05-8341-eafdbcc40dc1" containerName="keystone-api" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294837 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b8254ca-83c1-49a8-b453-107577b54f01" containerName="glance-httpd" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294852 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="217b822b-44c6-465e-982a-23fa07d94b58" containerName="ovn-controller" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294869 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b88af2db-0d8c-4d78-b1d9-5fb3c28c5e7b" containerName="probe" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.294889 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8026992e-7dd1-42d9-b362-82febc75c072" containerName="nova-metadata-log" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.296647 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.326184 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bw9cs"] Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.385708 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-catalog-content\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.385788 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-utilities\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.386008 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxq2b\" (UniqueName: \"kubernetes.io/projected/80df8766-f9be-4d5e-855c-0bdc1d2468cc-kube-api-access-zxq2b\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.487173 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-utilities\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.487267 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxq2b\" (UniqueName: \"kubernetes.io/projected/80df8766-f9be-4d5e-855c-0bdc1d2468cc-kube-api-access-zxq2b\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.487450 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-catalog-content\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.487961 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-utilities\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.488450 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-catalog-content\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.514696 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zxq2b\" (UniqueName: \"kubernetes.io/projected/80df8766-f9be-4d5e-855c-0bdc1d2468cc-kube-api-access-zxq2b\") pod \"community-operators-bw9cs\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:44 crc kubenswrapper[4922]: I0929 22:55:44.626300 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:45 crc kubenswrapper[4922]: I0929 22:55:45.163985 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bw9cs"] Sep 29 22:55:45 crc kubenswrapper[4922]: I0929 22:55:45.398141 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerStarted","Data":"7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d"} Sep 29 22:55:45 crc kubenswrapper[4922]: I0929 22:55:45.398452 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerStarted","Data":"f5cd7305f6ebc6548db817aa7a428885747d49b6dba8191f11df04c517b40c8a"} Sep 29 22:55:45 crc kubenswrapper[4922]: I0929 22:55:45.421908 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:55:45 crc kubenswrapper[4922]: E0929 22:55:45.422494 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:55:46 crc kubenswrapper[4922]: I0929 22:55:46.409997 4922 generic.go:334] "Generic (PLEG): container finished" podID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerID="7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d" exitCode=0 Sep 29 22:55:46 crc kubenswrapper[4922]: I0929 22:55:46.410094 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerDied","Data":"7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d"} Sep 29 22:55:46 crc kubenswrapper[4922]: I0929 22:55:46.412668 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 22:55:47 crc kubenswrapper[4922]: I0929 22:55:47.425235 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerStarted","Data":"d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552"} Sep 29 22:55:48 crc kubenswrapper[4922]: I0929 22:55:48.439323 4922 generic.go:334] "Generic (PLEG): container finished" podID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerID="d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552" exitCode=0 Sep 29 22:55:48 crc kubenswrapper[4922]: I0929 22:55:48.439457 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" 
event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerDied","Data":"d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552"} Sep 29 22:55:49 crc kubenswrapper[4922]: I0929 22:55:49.453281 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerStarted","Data":"000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422"} Sep 29 22:55:49 crc kubenswrapper[4922]: I0929 22:55:49.486118 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bw9cs" podStartSLOduration=2.899122566 podStartE2EDuration="5.486089654s" podCreationTimestamp="2025-09-29 22:55:44 +0000 UTC" firstStartedPulling="2025-09-29 22:55:46.41227211 +0000 UTC m=+1750.722560953" lastFinishedPulling="2025-09-29 22:55:48.999239188 +0000 UTC m=+1753.309528041" observedRunningTime="2025-09-29 22:55:49.480577389 +0000 UTC m=+1753.790866332" watchObservedRunningTime="2025-09-29 22:55:49.486089654 +0000 UTC m=+1753.796378497" Sep 29 22:55:49 crc kubenswrapper[4922]: I0929 22:55:49.927411 4922 scope.go:117] "RemoveContainer" containerID="f77d0a3f2b3eb711b2f8c5cdc56a0b17663397edcfbabb2c2ea7245deb82d352" Sep 29 22:55:54 crc kubenswrapper[4922]: I0929 22:55:54.627459 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:54 crc kubenswrapper[4922]: I0929 22:55:54.628728 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:54 crc kubenswrapper[4922]: I0929 22:55:54.710046 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:55 crc kubenswrapper[4922]: I0929 22:55:55.562469 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:55 crc kubenswrapper[4922]: I0929 22:55:55.625150 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bw9cs"] Sep 29 22:55:57 crc kubenswrapper[4922]: I0929 22:55:57.421824 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:55:57 crc kubenswrapper[4922]: E0929 22:55:57.422214 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:55:57 crc kubenswrapper[4922]: I0929 22:55:57.512285 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bw9cs" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="registry-server" containerID="cri-o://000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422" gracePeriod=2 Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.052563 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.151769 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxq2b\" (UniqueName: \"kubernetes.io/projected/80df8766-f9be-4d5e-855c-0bdc1d2468cc-kube-api-access-zxq2b\") pod \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.151891 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-catalog-content\") pod \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.151942 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-utilities\") pod \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\" (UID: \"80df8766-f9be-4d5e-855c-0bdc1d2468cc\") " Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.153289 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-utilities" (OuterVolumeSpecName: "utilities") pod "80df8766-f9be-4d5e-855c-0bdc1d2468cc" (UID: "80df8766-f9be-4d5e-855c-0bdc1d2468cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.169601 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80df8766-f9be-4d5e-855c-0bdc1d2468cc-kube-api-access-zxq2b" (OuterVolumeSpecName: "kube-api-access-zxq2b") pod "80df8766-f9be-4d5e-855c-0bdc1d2468cc" (UID: "80df8766-f9be-4d5e-855c-0bdc1d2468cc"). InnerVolumeSpecName "kube-api-access-zxq2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.253749 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxq2b\" (UniqueName: \"kubernetes.io/projected/80df8766-f9be-4d5e-855c-0bdc1d2468cc-kube-api-access-zxq2b\") on node \"crc\" DevicePath \"\"" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.253786 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.524319 4922 generic.go:334] "Generic (PLEG): container finished" podID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerID="000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422" exitCode=0 Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.524439 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bw9cs" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.526560 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerDied","Data":"000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422"} Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.526676 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bw9cs" event={"ID":"80df8766-f9be-4d5e-855c-0bdc1d2468cc","Type":"ContainerDied","Data":"f5cd7305f6ebc6548db817aa7a428885747d49b6dba8191f11df04c517b40c8a"} Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.526714 4922 scope.go:117] "RemoveContainer" containerID="000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.560737 4922 scope.go:117] "RemoveContainer" containerID="d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.569768 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "80df8766-f9be-4d5e-855c-0bdc1d2468cc" (UID: "80df8766-f9be-4d5e-855c-0bdc1d2468cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.591638 4922 scope.go:117] "RemoveContainer" containerID="7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.620447 4922 scope.go:117] "RemoveContainer" containerID="000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422" Sep 29 22:55:58 crc kubenswrapper[4922]: E0929 22:55:58.621199 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422\": container with ID starting with 000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422 not found: ID does not exist" containerID="000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.621246 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422"} err="failed to get container status \"000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422\": rpc error: code = NotFound desc = could not find container \"000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422\": container with ID starting with 000bac468849f429811bfb4071fbb6303cb035eace3ececdb248331a469b0422 not found: ID does not exist" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.621377 4922 scope.go:117] "RemoveContainer" containerID="d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552" Sep 29 22:55:58 crc kubenswrapper[4922]: E0929 22:55:58.622015 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552\": container with ID starting with d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552 not found: ID does not exist" 
containerID="d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.622101 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552"} err="failed to get container status \"d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552\": rpc error: code = NotFound desc = could not find container \"d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552\": container with ID starting with d2bd0f4e883c7c8592f7d9ec6d2d534aef1e96b44b612ac837d9df70a8264552 not found: ID does not exist" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.622132 4922 scope.go:117] "RemoveContainer" containerID="7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d" Sep 29 22:55:58 crc kubenswrapper[4922]: E0929 22:55:58.622748 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d\": container with ID starting with 7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d not found: ID does not exist" containerID="7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.622799 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d"} err="failed to get container status \"7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d\": rpc error: code = NotFound desc = could not find container \"7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d\": container with ID starting with 7cf87639f7501d5be0b5373a5951f93f138358d5f2b4fdb11d613c211f43405d not found: ID does not exist" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.660984 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/80df8766-f9be-4d5e-855c-0bdc1d2468cc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.883136 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bw9cs"] Sep 29 22:55:58 crc kubenswrapper[4922]: I0929 22:55:58.892863 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bw9cs"] Sep 29 22:56:00 crc kubenswrapper[4922]: I0929 22:56:00.437071 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" path="/var/lib/kubelet/pods/80df8766-f9be-4d5e-855c-0bdc1d2468cc/volumes" Sep 29 22:56:12 crc kubenswrapper[4922]: I0929 22:56:12.422361 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:56:12 crc kubenswrapper[4922]: E0929 22:56:12.424154 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:56:24 crc kubenswrapper[4922]: I0929 22:56:24.421930 4922 scope.go:117] "RemoveContainer" 
containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:56:24 crc kubenswrapper[4922]: E0929 22:56:24.422758 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:56:36 crc kubenswrapper[4922]: I0929 22:56:36.442002 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:56:36 crc kubenswrapper[4922]: E0929 22:56:36.442816 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:56:50 crc kubenswrapper[4922]: I0929 22:56:50.026182 4922 scope.go:117] "RemoveContainer" containerID="7f7d7eba3fa2e00ed7439126d30f059863a1f9a8c6d95ea44f9d8d23d062b615" Sep 29 22:56:50 crc kubenswrapper[4922]: I0929 22:56:50.065743 4922 scope.go:117] "RemoveContainer" containerID="a619712671e1ee8156ae552fb5f6eff0cfb3c1fee263e8762a56b0dafcc7ed41" Sep 29 22:56:51 crc kubenswrapper[4922]: I0929 22:56:51.423175 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:56:51 crc kubenswrapper[4922]: E0929 22:56:51.423558 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:57:05 crc kubenswrapper[4922]: I0929 22:57:05.422689 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:57:05 crc kubenswrapper[4922]: E0929 22:57:05.423568 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:57:20 crc kubenswrapper[4922]: I0929 22:57:20.422196 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:57:20 crc kubenswrapper[4922]: E0929 22:57:20.423244 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:57:31 crc kubenswrapper[4922]: I0929 22:57:31.421827 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:57:31 crc kubenswrapper[4922]: E0929 22:57:31.423793 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:57:46 crc kubenswrapper[4922]: I0929 22:57:46.430594 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:57:46 crc kubenswrapper[4922]: E0929 22:57:46.431594 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:58:00 crc kubenswrapper[4922]: I0929 22:58:00.422872 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:58:00 crc kubenswrapper[4922]: E0929 22:58:00.423852 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:58:13 crc kubenswrapper[4922]: I0929 22:58:13.422169 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:58:13 crc kubenswrapper[4922]: E0929 22:58:13.423285 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:58:24 crc kubenswrapper[4922]: I0929 22:58:24.422798 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:58:24 crc kubenswrapper[4922]: E0929 22:58:24.423911 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:58:38 crc kubenswrapper[4922]: I0929 22:58:38.422517 4922 
scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:58:38 crc kubenswrapper[4922]: E0929 22:58:38.423618 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:58:50 crc kubenswrapper[4922]: I0929 22:58:50.423053 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:58:50 crc kubenswrapper[4922]: E0929 22:58:50.424037 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:59:05 crc kubenswrapper[4922]: I0929 22:59:05.422807 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:59:05 crc kubenswrapper[4922]: E0929 22:59:05.423987 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:59:17 crc kubenswrapper[4922]: I0929 22:59:17.423068 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:59:17 crc kubenswrapper[4922]: E0929 22:59:17.429702 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:59:28 crc kubenswrapper[4922]: I0929 22:59:28.421889 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:59:28 crc kubenswrapper[4922]: E0929 22:59:28.422816 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 22:59:41 crc kubenswrapper[4922]: I0929 22:59:41.422600 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 22:59:41 crc kubenswrapper[4922]: I0929 22:59:41.689762 4922 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"a0e81052cb40277600e3b60604bc5a66510fa0b4da4e03a262d1b5100500286b"} Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.148677 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h"] Sep 29 23:00:00 crc kubenswrapper[4922]: E0929 23:00:00.149611 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="extract-content" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.149630 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="extract-content" Sep 29 23:00:00 crc kubenswrapper[4922]: E0929 23:00:00.149646 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="extract-utilities" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.149656 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="extract-utilities" Sep 29 23:00:00 crc kubenswrapper[4922]: E0929 23:00:00.149691 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="registry-server" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.149700 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="registry-server" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.149882 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="80df8766-f9be-4d5e-855c-0bdc1d2468cc" containerName="registry-server" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.150738 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.160330 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.160806 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.165801 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h"] Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.247378 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a3a0308-d028-4931-a959-c68dd8b2db37-config-volume\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.247569 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a3a0308-d028-4931-a959-c68dd8b2db37-secret-volume\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.247781 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txxf4\" (UniqueName: \"kubernetes.io/projected/1a3a0308-d028-4931-a959-c68dd8b2db37-kube-api-access-txxf4\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.349193 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a3a0308-d028-4931-a959-c68dd8b2db37-secret-volume\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.349328 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txxf4\" (UniqueName: \"kubernetes.io/projected/1a3a0308-d028-4931-a959-c68dd8b2db37-kube-api-access-txxf4\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.349532 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a3a0308-d028-4931-a959-c68dd8b2db37-config-volume\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.351154 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a3a0308-d028-4931-a959-c68dd8b2db37-config-volume\") pod 
\"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.368311 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a3a0308-d028-4931-a959-c68dd8b2db37-secret-volume\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.370461 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txxf4\" (UniqueName: \"kubernetes.io/projected/1a3a0308-d028-4931-a959-c68dd8b2db37-kube-api-access-txxf4\") pod \"collect-profiles-29319780-vld8h\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.489780 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.761734 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h"] Sep 29 23:00:00 crc kubenswrapper[4922]: W0929 23:00:00.764819 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a3a0308_d028_4931_a959_c68dd8b2db37.slice/crio-4eda8c4be141a7dddb7e317e3f79b3332bd937251eb9c4044b62d203a11d2b56 WatchSource:0}: Error finding container 4eda8c4be141a7dddb7e317e3f79b3332bd937251eb9c4044b62d203a11d2b56: Status 404 returned error can't find the container with id 4eda8c4be141a7dddb7e317e3f79b3332bd937251eb9c4044b62d203a11d2b56 Sep 29 23:00:00 crc kubenswrapper[4922]: I0929 23:00:00.863004 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" event={"ID":"1a3a0308-d028-4931-a959-c68dd8b2db37","Type":"ContainerStarted","Data":"4eda8c4be141a7dddb7e317e3f79b3332bd937251eb9c4044b62d203a11d2b56"} Sep 29 23:00:01 crc kubenswrapper[4922]: I0929 23:00:01.875546 4922 generic.go:334] "Generic (PLEG): container finished" podID="1a3a0308-d028-4931-a959-c68dd8b2db37" containerID="2c17b313cc66290d86db66ac133a7bbc85795c0b5bfd4801035cc184ca0a7e20" exitCode=0 Sep 29 23:00:01 crc kubenswrapper[4922]: I0929 23:00:01.875708 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" event={"ID":"1a3a0308-d028-4931-a959-c68dd8b2db37","Type":"ContainerDied","Data":"2c17b313cc66290d86db66ac133a7bbc85795c0b5bfd4801035cc184ca0a7e20"} Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.228651 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.302497 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a3a0308-d028-4931-a959-c68dd8b2db37-secret-volume\") pod \"1a3a0308-d028-4931-a959-c68dd8b2db37\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.302563 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txxf4\" (UniqueName: \"kubernetes.io/projected/1a3a0308-d028-4931-a959-c68dd8b2db37-kube-api-access-txxf4\") pod \"1a3a0308-d028-4931-a959-c68dd8b2db37\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.302806 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a3a0308-d028-4931-a959-c68dd8b2db37-config-volume\") pod \"1a3a0308-d028-4931-a959-c68dd8b2db37\" (UID: \"1a3a0308-d028-4931-a959-c68dd8b2db37\") " Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.304626 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a3a0308-d028-4931-a959-c68dd8b2db37-config-volume" (OuterVolumeSpecName: "config-volume") pod "1a3a0308-d028-4931-a959-c68dd8b2db37" (UID: "1a3a0308-d028-4931-a959-c68dd8b2db37"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.309654 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a3a0308-d028-4931-a959-c68dd8b2db37-kube-api-access-txxf4" (OuterVolumeSpecName: "kube-api-access-txxf4") pod "1a3a0308-d028-4931-a959-c68dd8b2db37" (UID: "1a3a0308-d028-4931-a959-c68dd8b2db37"). InnerVolumeSpecName "kube-api-access-txxf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.310477 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a3a0308-d028-4931-a959-c68dd8b2db37-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1a3a0308-d028-4931-a959-c68dd8b2db37" (UID: "1a3a0308-d028-4931-a959-c68dd8b2db37"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.404430 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a3a0308-d028-4931-a959-c68dd8b2db37-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.404467 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txxf4\" (UniqueName: \"kubernetes.io/projected/1a3a0308-d028-4931-a959-c68dd8b2db37-kube-api-access-txxf4\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.404483 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a3a0308-d028-4931-a959-c68dd8b2db37-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.895376 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" event={"ID":"1a3a0308-d028-4931-a959-c68dd8b2db37","Type":"ContainerDied","Data":"4eda8c4be141a7dddb7e317e3f79b3332bd937251eb9c4044b62d203a11d2b56"} Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.895470 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4eda8c4be141a7dddb7e317e3f79b3332bd937251eb9c4044b62d203a11d2b56" Sep 29 23:00:03 crc kubenswrapper[4922]: I0929 23:00:03.895503 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h" Sep 29 23:00:04 crc kubenswrapper[4922]: I0929 23:00:04.320752 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb"] Sep 29 23:00:04 crc kubenswrapper[4922]: I0929 23:00:04.330309 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319735-w4qgb"] Sep 29 23:00:04 crc kubenswrapper[4922]: I0929 23:00:04.439512 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4499e978-6c8b-4d19-98da-9067d3d01ad8" path="/var/lib/kubelet/pods/4499e978-6c8b-4d19-98da-9067d3d01ad8/volumes" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.716443 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lfhnx"] Sep 29 23:00:22 crc kubenswrapper[4922]: E0929 23:00:22.718313 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a3a0308-d028-4931-a959-c68dd8b2db37" containerName="collect-profiles" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.718341 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a3a0308-d028-4931-a959-c68dd8b2db37" containerName="collect-profiles" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.719607 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a3a0308-d028-4931-a959-c68dd8b2db37" containerName="collect-profiles" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.723989 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.753857 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lfhnx"] Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.839947 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-catalog-content\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.840012 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjmnk\" (UniqueName: \"kubernetes.io/projected/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-kube-api-access-gjmnk\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.840089 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-utilities\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.916960 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gvwn6"] Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.920235 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.926153 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gvwn6"] Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.941773 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-utilities\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.941863 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-catalog-content\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.941901 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjmnk\" (UniqueName: \"kubernetes.io/projected/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-kube-api-access-gjmnk\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.941957 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-utilities\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.941996 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-catalog-content\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.942022 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvhkp\" (UniqueName: \"kubernetes.io/projected/9431eea3-dd10-4635-962e-6431d35ce55c-kube-api-access-vvhkp\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.942601 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-utilities\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.943619 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-catalog-content\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:22 crc kubenswrapper[4922]: I0929 23:00:22.972219 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gjmnk\" (UniqueName: \"kubernetes.io/projected/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-kube-api-access-gjmnk\") pod \"certified-operators-lfhnx\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.042805 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-utilities\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.043256 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-catalog-content\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.043284 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvhkp\" (UniqueName: \"kubernetes.io/projected/9431eea3-dd10-4635-962e-6431d35ce55c-kube-api-access-vvhkp\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.044148 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-catalog-content\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.044343 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-utilities\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.062743 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvhkp\" (UniqueName: \"kubernetes.io/projected/9431eea3-dd10-4635-962e-6431d35ce55c-kube-api-access-vvhkp\") pod \"redhat-marketplace-gvwn6\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.082640 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.246822 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.571995 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gvwn6"] Sep 29 23:00:23 crc kubenswrapper[4922]: I0929 23:00:23.600505 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lfhnx"] Sep 29 23:00:24 crc kubenswrapper[4922]: I0929 23:00:24.121902 4922 generic.go:334] "Generic (PLEG): container finished" podID="9431eea3-dd10-4635-962e-6431d35ce55c" containerID="b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4" exitCode=0 Sep 29 23:00:24 crc kubenswrapper[4922]: I0929 23:00:24.121961 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gvwn6" event={"ID":"9431eea3-dd10-4635-962e-6431d35ce55c","Type":"ContainerDied","Data":"b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4"} Sep 29 23:00:24 crc kubenswrapper[4922]: I0929 23:00:24.122017 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gvwn6" event={"ID":"9431eea3-dd10-4635-962e-6431d35ce55c","Type":"ContainerStarted","Data":"09d920259d64d78c80f41abb546fdd35786fded780000491d84c2f193fdfe84a"} Sep 29 23:00:24 crc kubenswrapper[4922]: I0929 23:00:24.125660 4922 generic.go:334] "Generic (PLEG): container finished" podID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerID="d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c" exitCode=0 Sep 29 23:00:24 crc kubenswrapper[4922]: I0929 23:00:24.125710 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfhnx" event={"ID":"b981b625-eabb-4d14-bb8f-6f350e0a0bb3","Type":"ContainerDied","Data":"d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c"} Sep 29 23:00:24 crc kubenswrapper[4922]: I0929 23:00:24.125739 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfhnx" event={"ID":"b981b625-eabb-4d14-bb8f-6f350e0a0bb3","Type":"ContainerStarted","Data":"adb99625c24be6074c0d9080ff60211cfbf9533e8c40265e2768232cf1c1389a"} Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.112229 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tr6r7"] Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.116078 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.137521 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tr6r7"] Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.178197 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43844139-65aa-433b-9670-008ab6c350e3-catalog-content\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.178352 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43844139-65aa-433b-9670-008ab6c350e3-utilities\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.178612 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwlpz\" (UniqueName: \"kubernetes.io/projected/43844139-65aa-433b-9670-008ab6c350e3-kube-api-access-jwlpz\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.280103 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwlpz\" (UniqueName: \"kubernetes.io/projected/43844139-65aa-433b-9670-008ab6c350e3-kube-api-access-jwlpz\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.280175 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43844139-65aa-433b-9670-008ab6c350e3-catalog-content\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.280219 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43844139-65aa-433b-9670-008ab6c350e3-utilities\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.280719 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43844139-65aa-433b-9670-008ab6c350e3-catalog-content\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.280752 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43844139-65aa-433b-9670-008ab6c350e3-utilities\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.307296 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-jwlpz\" (UniqueName: \"kubernetes.io/projected/43844139-65aa-433b-9670-008ab6c350e3-kube-api-access-jwlpz\") pod \"redhat-operators-tr6r7\" (UID: \"43844139-65aa-433b-9670-008ab6c350e3\") " pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.461856 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:25 crc kubenswrapper[4922]: I0929 23:00:25.920008 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tr6r7"] Sep 29 23:00:26 crc kubenswrapper[4922]: I0929 23:00:26.145992 4922 generic.go:334] "Generic (PLEG): container finished" podID="9431eea3-dd10-4635-962e-6431d35ce55c" containerID="acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5" exitCode=0 Sep 29 23:00:26 crc kubenswrapper[4922]: I0929 23:00:26.146099 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gvwn6" event={"ID":"9431eea3-dd10-4635-962e-6431d35ce55c","Type":"ContainerDied","Data":"acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5"} Sep 29 23:00:26 crc kubenswrapper[4922]: I0929 23:00:26.151539 4922 generic.go:334] "Generic (PLEG): container finished" podID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerID="e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12" exitCode=0 Sep 29 23:00:26 crc kubenswrapper[4922]: I0929 23:00:26.151572 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfhnx" event={"ID":"b981b625-eabb-4d14-bb8f-6f350e0a0bb3","Type":"ContainerDied","Data":"e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12"} Sep 29 23:00:26 crc kubenswrapper[4922]: I0929 23:00:26.153422 4922 generic.go:334] "Generic (PLEG): container finished" podID="43844139-65aa-433b-9670-008ab6c350e3" containerID="7b347ee2bc9af75ecaab6747ae50e8ca4fd0d8aedb5f0953dd81630c0dc35558" exitCode=0 Sep 29 23:00:26 crc kubenswrapper[4922]: I0929 23:00:26.153459 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr6r7" event={"ID":"43844139-65aa-433b-9670-008ab6c350e3","Type":"ContainerDied","Data":"7b347ee2bc9af75ecaab6747ae50e8ca4fd0d8aedb5f0953dd81630c0dc35558"} Sep 29 23:00:26 crc kubenswrapper[4922]: I0929 23:00:26.153513 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr6r7" event={"ID":"43844139-65aa-433b-9670-008ab6c350e3","Type":"ContainerStarted","Data":"a0048d1a5f33c4c8216e132ae46d5e7cf3634d6536e17b6a5878506e7d2e26c5"} Sep 29 23:00:27 crc kubenswrapper[4922]: I0929 23:00:27.163651 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gvwn6" event={"ID":"9431eea3-dd10-4635-962e-6431d35ce55c","Type":"ContainerStarted","Data":"d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d"} Sep 29 23:00:27 crc kubenswrapper[4922]: I0929 23:00:27.166822 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfhnx" event={"ID":"b981b625-eabb-4d14-bb8f-6f350e0a0bb3","Type":"ContainerStarted","Data":"b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c"} Sep 29 23:00:27 crc kubenswrapper[4922]: I0929 23:00:27.184700 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gvwn6" podStartSLOduration=2.67272763 
podStartE2EDuration="5.184679947s" podCreationTimestamp="2025-09-29 23:00:22 +0000 UTC" firstStartedPulling="2025-09-29 23:00:24.124258208 +0000 UTC m=+2028.434547061" lastFinishedPulling="2025-09-29 23:00:26.636210525 +0000 UTC m=+2030.946499378" observedRunningTime="2025-09-29 23:00:27.183570499 +0000 UTC m=+2031.493859332" watchObservedRunningTime="2025-09-29 23:00:27.184679947 +0000 UTC m=+2031.494968760" Sep 29 23:00:27 crc kubenswrapper[4922]: I0929 23:00:27.209340 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lfhnx" podStartSLOduration=2.6555020750000002 podStartE2EDuration="5.209322063s" podCreationTimestamp="2025-09-29 23:00:22 +0000 UTC" firstStartedPulling="2025-09-29 23:00:24.126671137 +0000 UTC m=+2028.436959980" lastFinishedPulling="2025-09-29 23:00:26.680491115 +0000 UTC m=+2030.990779968" observedRunningTime="2025-09-29 23:00:27.20308303 +0000 UTC m=+2031.513371853" watchObservedRunningTime="2025-09-29 23:00:27.209322063 +0000 UTC m=+2031.519610876" Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.083223 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.083790 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.170174 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.215585 4922 generic.go:334] "Generic (PLEG): container finished" podID="43844139-65aa-433b-9670-008ab6c350e3" containerID="4cabcc16cc24644ea459c4e71d4007dd87fc1c6b72c4113cf33fb097074092ef" exitCode=0 Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.215653 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr6r7" event={"ID":"43844139-65aa-433b-9670-008ab6c350e3","Type":"ContainerDied","Data":"4cabcc16cc24644ea459c4e71d4007dd87fc1c6b72c4113cf33fb097074092ef"} Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.247928 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.247998 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.290387 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:33 crc kubenswrapper[4922]: I0929 23:00:33.328160 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:34 crc kubenswrapper[4922]: I0929 23:00:34.304458 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:35 crc kubenswrapper[4922]: I0929 23:00:35.412024 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lfhnx"] Sep 29 23:00:35 crc kubenswrapper[4922]: I0929 23:00:35.412579 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lfhnx" 
podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="registry-server" containerID="cri-o://b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c" gracePeriod=2 Sep 29 23:00:35 crc kubenswrapper[4922]: I0929 23:00:35.608985 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gvwn6"] Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.056267 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.149459 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjmnk\" (UniqueName: \"kubernetes.io/projected/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-kube-api-access-gjmnk\") pod \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.149563 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-catalog-content\") pod \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.149651 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-utilities\") pod \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\" (UID: \"b981b625-eabb-4d14-bb8f-6f350e0a0bb3\") " Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.152164 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-utilities" (OuterVolumeSpecName: "utilities") pod "b981b625-eabb-4d14-bb8f-6f350e0a0bb3" (UID: "b981b625-eabb-4d14-bb8f-6f350e0a0bb3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.158607 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-kube-api-access-gjmnk" (OuterVolumeSpecName: "kube-api-access-gjmnk") pod "b981b625-eabb-4d14-bb8f-6f350e0a0bb3" (UID: "b981b625-eabb-4d14-bb8f-6f350e0a0bb3"). InnerVolumeSpecName "kube-api-access-gjmnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.224376 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b981b625-eabb-4d14-bb8f-6f350e0a0bb3" (UID: "b981b625-eabb-4d14-bb8f-6f350e0a0bb3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.249994 4922 generic.go:334] "Generic (PLEG): container finished" podID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerID="b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c" exitCode=0 Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.250071 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfhnx" event={"ID":"b981b625-eabb-4d14-bb8f-6f350e0a0bb3","Type":"ContainerDied","Data":"b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c"} Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.250159 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfhnx" event={"ID":"b981b625-eabb-4d14-bb8f-6f350e0a0bb3","Type":"ContainerDied","Data":"adb99625c24be6074c0d9080ff60211cfbf9533e8c40265e2768232cf1c1389a"} Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.250203 4922 scope.go:117] "RemoveContainer" containerID="b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.250319 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gvwn6" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="registry-server" containerID="cri-o://d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d" gracePeriod=2 Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.251370 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfhnx" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.251577 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjmnk\" (UniqueName: \"kubernetes.io/projected/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-kube-api-access-gjmnk\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.251996 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.252238 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b981b625-eabb-4d14-bb8f-6f350e0a0bb3-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.285265 4922 scope.go:117] "RemoveContainer" containerID="e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.304679 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lfhnx"] Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.313211 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lfhnx"] Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.369134 4922 scope.go:117] "RemoveContainer" containerID="d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.402105 4922 scope.go:117] "RemoveContainer" containerID="b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c" Sep 29 23:00:36 crc kubenswrapper[4922]: E0929 23:00:36.402648 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c\": container with ID starting with b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c not found: ID does not exist" containerID="b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.402696 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c"} err="failed to get container status \"b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c\": rpc error: code = NotFound desc = could not find container \"b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c\": container with ID starting with b5e051af6fa5efd6d81af7af8f23b0ab3ac6d45c07c2bb238467ec34a33f744c not found: ID does not exist" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.402716 4922 scope.go:117] "RemoveContainer" containerID="e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12" Sep 29 23:00:36 crc kubenswrapper[4922]: E0929 23:00:36.403127 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12\": container with ID starting with e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12 not found: ID does not exist" containerID="e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.403148 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12"} err="failed to get container status \"e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12\": rpc error: code = NotFound desc = could not find container \"e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12\": container with ID starting with e16745f0b3eb083b30ca0426e35260ed6454c8f3e29a4798611f5031950c8f12 not found: ID does not exist" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.403159 4922 scope.go:117] "RemoveContainer" containerID="d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c" Sep 29 23:00:36 crc kubenswrapper[4922]: E0929 23:00:36.403419 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c\": container with ID starting with d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c not found: ID does not exist" containerID="d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.403435 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c"} err="failed to get container status \"d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c\": rpc error: code = NotFound desc = could not find container \"d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c\": container with ID starting with d3b3adfa942e20340a6cf1271f1279c8a2f739f21fd00b25143c0b1282b7342c not found: ID does not exist" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.433784 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" 
path="/var/lib/kubelet/pods/b981b625-eabb-4d14-bb8f-6f350e0a0bb3/volumes" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.671316 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.761193 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-utilities\") pod \"9431eea3-dd10-4635-962e-6431d35ce55c\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.761275 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-catalog-content\") pod \"9431eea3-dd10-4635-962e-6431d35ce55c\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.761307 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvhkp\" (UniqueName: \"kubernetes.io/projected/9431eea3-dd10-4635-962e-6431d35ce55c-kube-api-access-vvhkp\") pod \"9431eea3-dd10-4635-962e-6431d35ce55c\" (UID: \"9431eea3-dd10-4635-962e-6431d35ce55c\") " Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.762686 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-utilities" (OuterVolumeSpecName: "utilities") pod "9431eea3-dd10-4635-962e-6431d35ce55c" (UID: "9431eea3-dd10-4635-962e-6431d35ce55c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.765889 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9431eea3-dd10-4635-962e-6431d35ce55c-kube-api-access-vvhkp" (OuterVolumeSpecName: "kube-api-access-vvhkp") pod "9431eea3-dd10-4635-962e-6431d35ce55c" (UID: "9431eea3-dd10-4635-962e-6431d35ce55c"). InnerVolumeSpecName "kube-api-access-vvhkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.775497 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9431eea3-dd10-4635-962e-6431d35ce55c" (UID: "9431eea3-dd10-4635-962e-6431d35ce55c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.864530 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.864601 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9431eea3-dd10-4635-962e-6431d35ce55c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:36 crc kubenswrapper[4922]: I0929 23:00:36.864625 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvhkp\" (UniqueName: \"kubernetes.io/projected/9431eea3-dd10-4635-962e-6431d35ce55c-kube-api-access-vvhkp\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.263454 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tr6r7" event={"ID":"43844139-65aa-433b-9670-008ab6c350e3","Type":"ContainerStarted","Data":"3665f8c3dd68206bebebb6556a2353c276a1f1e11991f46c7b2bea2f5a4ce063"} Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.272065 4922 generic.go:334] "Generic (PLEG): container finished" podID="9431eea3-dd10-4635-962e-6431d35ce55c" containerID="d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d" exitCode=0 Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.272280 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gvwn6" event={"ID":"9431eea3-dd10-4635-962e-6431d35ce55c","Type":"ContainerDied","Data":"d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d"} Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.272340 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gvwn6" event={"ID":"9431eea3-dd10-4635-962e-6431d35ce55c","Type":"ContainerDied","Data":"09d920259d64d78c80f41abb546fdd35786fded780000491d84c2f193fdfe84a"} Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.272353 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gvwn6" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.272413 4922 scope.go:117] "RemoveContainer" containerID="d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.316756 4922 scope.go:117] "RemoveContainer" containerID="acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.329864 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tr6r7" podStartSLOduration=2.403091124 podStartE2EDuration="12.329844183s" podCreationTimestamp="2025-09-29 23:00:25 +0000 UTC" firstStartedPulling="2025-09-29 23:00:26.154777443 +0000 UTC m=+2030.465066256" lastFinishedPulling="2025-09-29 23:00:36.081530502 +0000 UTC m=+2040.391819315" observedRunningTime="2025-09-29 23:00:37.300646284 +0000 UTC m=+2041.610935127" watchObservedRunningTime="2025-09-29 23:00:37.329844183 +0000 UTC m=+2041.640132996" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.334571 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gvwn6"] Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.336516 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gvwn6"] Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.360955 4922 scope.go:117] "RemoveContainer" containerID="b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.393609 4922 scope.go:117] "RemoveContainer" containerID="d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d" Sep 29 23:00:37 crc kubenswrapper[4922]: E0929 23:00:37.394177 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d\": container with ID starting with d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d not found: ID does not exist" containerID="d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.394238 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d"} err="failed to get container status \"d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d\": rpc error: code = NotFound desc = could not find container \"d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d\": container with ID starting with d2ae054498ea9426dd01ffb617943ca73d7424d9a8083678052eb0260ad8707d not found: ID does not exist" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.394276 4922 scope.go:117] "RemoveContainer" containerID="acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5" Sep 29 23:00:37 crc kubenswrapper[4922]: E0929 23:00:37.394712 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5\": container with ID starting with acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5 not found: ID does not exist" containerID="acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.394772 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5"} err="failed to get container status \"acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5\": rpc error: code = NotFound desc = could not find container \"acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5\": container with ID starting with acec318825b418dada4a81d65d8a5176ce73ed0b4c18078e03c6c79a537a77a5 not found: ID does not exist" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.394811 4922 scope.go:117] "RemoveContainer" containerID="b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4" Sep 29 23:00:37 crc kubenswrapper[4922]: E0929 23:00:37.395165 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4\": container with ID starting with b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4 not found: ID does not exist" containerID="b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4" Sep 29 23:00:37 crc kubenswrapper[4922]: I0929 23:00:37.395209 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4"} err="failed to get container status \"b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4\": rpc error: code = NotFound desc = could not find container \"b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4\": container with ID starting with b67cd404aa009f4ae31a0a56e55183cbb83e71ae87f1eac79959e7916c5ac7a4 not found: ID does not exist" Sep 29 23:00:38 crc kubenswrapper[4922]: I0929 23:00:38.437131 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" path="/var/lib/kubelet/pods/9431eea3-dd10-4635-962e-6431d35ce55c/volumes" Sep 29 23:00:45 crc kubenswrapper[4922]: I0929 23:00:45.462260 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:45 crc kubenswrapper[4922]: I0929 23:00:45.463532 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:45 crc kubenswrapper[4922]: I0929 23:00:45.536296 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:46 crc kubenswrapper[4922]: I0929 23:00:46.441724 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tr6r7" Sep 29 23:00:46 crc kubenswrapper[4922]: I0929 23:00:46.565814 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tr6r7"] Sep 29 23:00:46 crc kubenswrapper[4922]: I0929 23:00:46.615611 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vl2vq"] Sep 29 23:00:46 crc kubenswrapper[4922]: I0929 23:00:46.616833 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vl2vq" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="registry-server" containerID="cri-o://696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d" gracePeriod=2 Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.005085 4922 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.039040 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7zw8\" (UniqueName: \"kubernetes.io/projected/c532762f-a530-445f-bb45-08438b834442-kube-api-access-l7zw8\") pod \"c532762f-a530-445f-bb45-08438b834442\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.039146 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-utilities\") pod \"c532762f-a530-445f-bb45-08438b834442\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.039202 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-catalog-content\") pod \"c532762f-a530-445f-bb45-08438b834442\" (UID: \"c532762f-a530-445f-bb45-08438b834442\") " Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.039771 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-utilities" (OuterVolumeSpecName: "utilities") pod "c532762f-a530-445f-bb45-08438b834442" (UID: "c532762f-a530-445f-bb45-08438b834442"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.045848 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c532762f-a530-445f-bb45-08438b834442-kube-api-access-l7zw8" (OuterVolumeSpecName: "kube-api-access-l7zw8") pod "c532762f-a530-445f-bb45-08438b834442" (UID: "c532762f-a530-445f-bb45-08438b834442"). InnerVolumeSpecName "kube-api-access-l7zw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.105230 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c532762f-a530-445f-bb45-08438b834442" (UID: "c532762f-a530-445f-bb45-08438b834442"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.140231 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.140261 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c532762f-a530-445f-bb45-08438b834442-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.140271 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7zw8\" (UniqueName: \"kubernetes.io/projected/c532762f-a530-445f-bb45-08438b834442-kube-api-access-l7zw8\") on node \"crc\" DevicePath \"\"" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.369079 4922 generic.go:334] "Generic (PLEG): container finished" podID="c532762f-a530-445f-bb45-08438b834442" containerID="696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d" exitCode=0 Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.369851 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vl2vq" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.370519 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl2vq" event={"ID":"c532762f-a530-445f-bb45-08438b834442","Type":"ContainerDied","Data":"696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d"} Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.370663 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vl2vq" event={"ID":"c532762f-a530-445f-bb45-08438b834442","Type":"ContainerDied","Data":"2dbc9905c8a20e3d3edf7dcb7e458f78253d3aaf99f8e52182bef06d5f4eee20"} Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.370688 4922 scope.go:117] "RemoveContainer" containerID="696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.391265 4922 scope.go:117] "RemoveContainer" containerID="48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.405139 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vl2vq"] Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.410414 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vl2vq"] Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.419976 4922 scope.go:117] "RemoveContainer" containerID="0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.437163 4922 scope.go:117] "RemoveContainer" containerID="696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d" Sep 29 23:00:47 crc kubenswrapper[4922]: E0929 23:00:47.437728 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d\": container with ID starting with 696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d not found: ID does not exist" containerID="696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.437794 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d"} err="failed to get container status \"696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d\": rpc error: code = NotFound desc = could not find container \"696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d\": container with ID starting with 696bfd0701aedb3419b0eaf8dd280abee38fabe3411969a96eae2229b5e6773d not found: ID does not exist" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.437822 4922 scope.go:117] "RemoveContainer" containerID="48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232" Sep 29 23:00:47 crc kubenswrapper[4922]: E0929 23:00:47.438283 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232\": container with ID starting with 48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232 not found: ID does not exist" containerID="48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.438314 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232"} err="failed to get container status \"48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232\": rpc error: code = NotFound desc = could not find container \"48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232\": container with ID starting with 48c5e72f341865f18f7f4f1521c25ab13b3b5b3566fcfb6f77a0f56f14aab232 not found: ID does not exist" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.438337 4922 scope.go:117] "RemoveContainer" containerID="0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6" Sep 29 23:00:47 crc kubenswrapper[4922]: E0929 23:00:47.438950 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6\": container with ID starting with 0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6 not found: ID does not exist" containerID="0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6" Sep 29 23:00:47 crc kubenswrapper[4922]: I0929 23:00:47.438991 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6"} err="failed to get container status \"0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6\": rpc error: code = NotFound desc = could not find container \"0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6\": container with ID starting with 0abf03b2b6fb833c79c2cff5a2462bc475b22dd48660441405282936ccecf9a6 not found: ID does not exist" Sep 29 23:00:48 crc kubenswrapper[4922]: I0929 23:00:48.431669 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c532762f-a530-445f-bb45-08438b834442" path="/var/lib/kubelet/pods/c532762f-a530-445f-bb45-08438b834442/volumes" Sep 29 23:00:50 crc kubenswrapper[4922]: I0929 23:00:50.227285 4922 scope.go:117] "RemoveContainer" containerID="be0c7ddb6f78a9be42ceaf431fcd784b5ca3fed33b714a6a3e14d4a1b86e3011" Sep 29 23:01:58 crc kubenswrapper[4922]: I0929 23:01:58.912920 4922 patch_prober.go:28] interesting 
pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:01:58 crc kubenswrapper[4922]: I0929 23:01:58.913576 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:02:28 crc kubenswrapper[4922]: I0929 23:02:28.912775 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:02:28 crc kubenswrapper[4922]: I0929 23:02:28.913527 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:02:58 crc kubenswrapper[4922]: I0929 23:02:58.913751 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:02:58 crc kubenswrapper[4922]: I0929 23:02:58.916253 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:02:58 crc kubenswrapper[4922]: I0929 23:02:58.916609 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:02:58 crc kubenswrapper[4922]: I0929 23:02:58.917983 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a0e81052cb40277600e3b60604bc5a66510fa0b4da4e03a262d1b5100500286b"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:02:58 crc kubenswrapper[4922]: I0929 23:02:58.918285 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://a0e81052cb40277600e3b60604bc5a66510fa0b4da4e03a262d1b5100500286b" gracePeriod=600 Sep 29 23:02:59 crc kubenswrapper[4922]: I0929 23:02:59.586361 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="a0e81052cb40277600e3b60604bc5a66510fa0b4da4e03a262d1b5100500286b" exitCode=0 Sep 29 23:02:59 crc kubenswrapper[4922]: I0929 23:02:59.586485 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"a0e81052cb40277600e3b60604bc5a66510fa0b4da4e03a262d1b5100500286b"} Sep 29 23:02:59 crc kubenswrapper[4922]: I0929 23:02:59.586676 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab"} Sep 29 23:02:59 crc kubenswrapper[4922]: I0929 23:02:59.586696 4922 scope.go:117] "RemoveContainer" containerID="1b6216ff96b2fecc06b3ea6a193ebffe3d0bc3934c2bdf75b5318251bfe0c16d" Sep 29 23:05:28 crc kubenswrapper[4922]: I0929 23:05:28.913348 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:05:28 crc kubenswrapper[4922]: I0929 23:05:28.915627 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:05:58 crc kubenswrapper[4922]: I0929 23:05:58.913246 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:05:58 crc kubenswrapper[4922]: I0929 23:05:58.913937 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.247460 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hhckg"] Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248430 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="extract-utilities" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248451 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="extract-utilities" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248472 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="extract-content" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248486 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="extract-content" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248501 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248514 4922 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248547 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="extract-utilities" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248558 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="extract-utilities" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248575 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="extract-utilities" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248587 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="extract-utilities" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248606 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248617 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248633 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="extract-content" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248644 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="extract-content" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248669 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248682 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: E0929 23:06:08.248697 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="extract-content" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248709 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="extract-content" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248956 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b981b625-eabb-4d14-bb8f-6f350e0a0bb3" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.248980 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9431eea3-dd10-4635-962e-6431d35ce55c" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.249019 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c532762f-a530-445f-bb45-08438b834442" containerName="registry-server" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.255377 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.273612 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hhckg"] Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.441783 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwd5q\" (UniqueName: \"kubernetes.io/projected/331f6e00-a958-4a5c-96de-411890893fad-kube-api-access-qwd5q\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.441850 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/331f6e00-a958-4a5c-96de-411890893fad-utilities\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.441933 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/331f6e00-a958-4a5c-96de-411890893fad-catalog-content\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.543728 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwd5q\" (UniqueName: \"kubernetes.io/projected/331f6e00-a958-4a5c-96de-411890893fad-kube-api-access-qwd5q\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.543819 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/331f6e00-a958-4a5c-96de-411890893fad-utilities\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.543937 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/331f6e00-a958-4a5c-96de-411890893fad-catalog-content\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.544753 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/331f6e00-a958-4a5c-96de-411890893fad-catalog-content\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.546658 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/331f6e00-a958-4a5c-96de-411890893fad-utilities\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.569628 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qwd5q\" (UniqueName: \"kubernetes.io/projected/331f6e00-a958-4a5c-96de-411890893fad-kube-api-access-qwd5q\") pod \"community-operators-hhckg\" (UID: \"331f6e00-a958-4a5c-96de-411890893fad\") " pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.591923 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:08 crc kubenswrapper[4922]: I0929 23:06:08.908903 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hhckg"] Sep 29 23:06:09 crc kubenswrapper[4922]: I0929 23:06:09.385887 4922 generic.go:334] "Generic (PLEG): container finished" podID="331f6e00-a958-4a5c-96de-411890893fad" containerID="b874f0358da7168d810feaf1357d45eb19696fe7da1ecb07fa1795233852f28e" exitCode=0 Sep 29 23:06:09 crc kubenswrapper[4922]: I0929 23:06:09.386120 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhckg" event={"ID":"331f6e00-a958-4a5c-96de-411890893fad","Type":"ContainerDied","Data":"b874f0358da7168d810feaf1357d45eb19696fe7da1ecb07fa1795233852f28e"} Sep 29 23:06:09 crc kubenswrapper[4922]: I0929 23:06:09.386635 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhckg" event={"ID":"331f6e00-a958-4a5c-96de-411890893fad","Type":"ContainerStarted","Data":"4e2a3d026f492b49acdc876a41e821e5ee24d97ab926229628310a20b34791e5"} Sep 29 23:06:09 crc kubenswrapper[4922]: I0929 23:06:09.388380 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 23:06:13 crc kubenswrapper[4922]: I0929 23:06:13.451790 4922 generic.go:334] "Generic (PLEG): container finished" podID="331f6e00-a958-4a5c-96de-411890893fad" containerID="24dfc188961a8e86e26e603f5be31495c75f3e0af49878c2a01efb9becc617ee" exitCode=0 Sep 29 23:06:13 crc kubenswrapper[4922]: I0929 23:06:13.451899 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhckg" event={"ID":"331f6e00-a958-4a5c-96de-411890893fad","Type":"ContainerDied","Data":"24dfc188961a8e86e26e603f5be31495c75f3e0af49878c2a01efb9becc617ee"} Sep 29 23:06:14 crc kubenswrapper[4922]: I0929 23:06:14.464923 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hhckg" event={"ID":"331f6e00-a958-4a5c-96de-411890893fad","Type":"ContainerStarted","Data":"2e52443bc68ad176f66e5e320d928fb1afbc182c989f6eb23015b908d2de6fae"} Sep 29 23:06:18 crc kubenswrapper[4922]: I0929 23:06:18.593195 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:18 crc kubenswrapper[4922]: I0929 23:06:18.593602 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:18 crc kubenswrapper[4922]: I0929 23:06:18.666472 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:18 crc kubenswrapper[4922]: I0929 23:06:18.697214 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hhckg" podStartSLOduration=6.177555761 podStartE2EDuration="10.697187992s" podCreationTimestamp="2025-09-29 23:06:08 +0000 UTC" firstStartedPulling="2025-09-29 
23:06:09.387993078 +0000 UTC m=+2373.698281921" lastFinishedPulling="2025-09-29 23:06:13.907625299 +0000 UTC m=+2378.217914152" observedRunningTime="2025-09-29 23:06:14.493900204 +0000 UTC m=+2378.804189057" watchObservedRunningTime="2025-09-29 23:06:18.697187992 +0000 UTC m=+2383.007476845" Sep 29 23:06:19 crc kubenswrapper[4922]: I0929 23:06:19.584752 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hhckg" Sep 29 23:06:19 crc kubenswrapper[4922]: I0929 23:06:19.683323 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hhckg"] Sep 29 23:06:19 crc kubenswrapper[4922]: I0929 23:06:19.734508 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-49hdj"] Sep 29 23:06:19 crc kubenswrapper[4922]: I0929 23:06:19.734805 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-49hdj" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="registry-server" containerID="cri-o://73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d" gracePeriod=2 Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.232704 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-49hdj" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.339582 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbwnq\" (UniqueName: \"kubernetes.io/projected/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-kube-api-access-zbwnq\") pod \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.339692 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-utilities\") pod \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.339723 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-catalog-content\") pod \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\" (UID: \"ac2c8151-6c2e-4341-a5fd-0beb09dcba81\") " Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.340193 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-utilities" (OuterVolumeSpecName: "utilities") pod "ac2c8151-6c2e-4341-a5fd-0beb09dcba81" (UID: "ac2c8151-6c2e-4341-a5fd-0beb09dcba81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.345621 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-kube-api-access-zbwnq" (OuterVolumeSpecName: "kube-api-access-zbwnq") pod "ac2c8151-6c2e-4341-a5fd-0beb09dcba81" (UID: "ac2c8151-6c2e-4341-a5fd-0beb09dcba81"). InnerVolumeSpecName "kube-api-access-zbwnq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.382753 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac2c8151-6c2e-4341-a5fd-0beb09dcba81" (UID: "ac2c8151-6c2e-4341-a5fd-0beb09dcba81"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.440754 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbwnq\" (UniqueName: \"kubernetes.io/projected/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-kube-api-access-zbwnq\") on node \"crc\" DevicePath \"\"" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.440776 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.440785 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac2c8151-6c2e-4341-a5fd-0beb09dcba81-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.519278 4922 generic.go:334] "Generic (PLEG): container finished" podID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerID="73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d" exitCode=0 Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.519347 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49hdj" event={"ID":"ac2c8151-6c2e-4341-a5fd-0beb09dcba81","Type":"ContainerDied","Data":"73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d"} Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.519375 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-49hdj" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.519414 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49hdj" event={"ID":"ac2c8151-6c2e-4341-a5fd-0beb09dcba81","Type":"ContainerDied","Data":"77977630dceb27ba17bbf1a439a29111ead811255f60fa3cec45798d58514d35"} Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.519437 4922 scope.go:117] "RemoveContainer" containerID="73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.550645 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-49hdj"] Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.555341 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-49hdj"] Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.556972 4922 scope.go:117] "RemoveContainer" containerID="bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.588908 4922 scope.go:117] "RemoveContainer" containerID="0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.611470 4922 scope.go:117] "RemoveContainer" containerID="73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d" Sep 29 23:06:20 crc kubenswrapper[4922]: E0929 23:06:20.611953 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d\": container with ID starting with 73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d not found: ID does not exist" containerID="73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.611996 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d"} err="failed to get container status \"73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d\": rpc error: code = NotFound desc = could not find container \"73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d\": container with ID starting with 73d15dc99b89010e7d9d7e87cbeb954c7234e59e27073f51a81521646090a23d not found: ID does not exist" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.612022 4922 scope.go:117] "RemoveContainer" containerID="bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933" Sep 29 23:06:20 crc kubenswrapper[4922]: E0929 23:06:20.612655 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933\": container with ID starting with bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933 not found: ID does not exist" containerID="bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.612688 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933"} err="failed to get container status \"bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933\": rpc error: code = NotFound desc = could not find 
container \"bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933\": container with ID starting with bd52ca0cf2920db222be65ce2a05f2399a34bcfe967ffbd13fba4ae8001eb933 not found: ID does not exist" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.612709 4922 scope.go:117] "RemoveContainer" containerID="0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e" Sep 29 23:06:20 crc kubenswrapper[4922]: E0929 23:06:20.613109 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e\": container with ID starting with 0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e not found: ID does not exist" containerID="0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e" Sep 29 23:06:20 crc kubenswrapper[4922]: I0929 23:06:20.613152 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e"} err="failed to get container status \"0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e\": rpc error: code = NotFound desc = could not find container \"0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e\": container with ID starting with 0dbd1c32fa8f7cf595c3f224a48470d9a390308747216ef791fa7d9fd558070e not found: ID does not exist" Sep 29 23:06:22 crc kubenswrapper[4922]: I0929 23:06:22.430033 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" path="/var/lib/kubelet/pods/ac2c8151-6c2e-4341-a5fd-0beb09dcba81/volumes" Sep 29 23:06:28 crc kubenswrapper[4922]: I0929 23:06:28.912953 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:06:28 crc kubenswrapper[4922]: I0929 23:06:28.913670 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:06:28 crc kubenswrapper[4922]: I0929 23:06:28.913731 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:06:28 crc kubenswrapper[4922]: I0929 23:06:28.914482 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:06:28 crc kubenswrapper[4922]: I0929 23:06:28.914581 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" gracePeriod=600 Sep 29 23:06:29 crc kubenswrapper[4922]: E0929 23:06:29.060373 4922 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:06:29 crc kubenswrapper[4922]: I0929 23:06:29.610033 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" exitCode=0 Sep 29 23:06:29 crc kubenswrapper[4922]: I0929 23:06:29.610109 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab"} Sep 29 23:06:29 crc kubenswrapper[4922]: I0929 23:06:29.610162 4922 scope.go:117] "RemoveContainer" containerID="a0e81052cb40277600e3b60604bc5a66510fa0b4da4e03a262d1b5100500286b" Sep 29 23:06:29 crc kubenswrapper[4922]: I0929 23:06:29.611000 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:06:29 crc kubenswrapper[4922]: E0929 23:06:29.611370 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:06:41 crc kubenswrapper[4922]: I0929 23:06:41.422080 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:06:41 crc kubenswrapper[4922]: E0929 23:06:41.423057 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:06:54 crc kubenswrapper[4922]: I0929 23:06:54.423560 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:06:54 crc kubenswrapper[4922]: E0929 23:06:54.424623 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:07:08 crc kubenswrapper[4922]: I0929 23:07:08.422006 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:07:08 crc kubenswrapper[4922]: E0929 23:07:08.423052 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:07:23 crc kubenswrapper[4922]: I0929 23:07:23.422292 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:07:23 crc kubenswrapper[4922]: E0929 23:07:23.423396 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:07:35 crc kubenswrapper[4922]: I0929 23:07:35.422999 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:07:35 crc kubenswrapper[4922]: E0929 23:07:35.424312 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:07:46 crc kubenswrapper[4922]: I0929 23:07:46.425708 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:07:46 crc kubenswrapper[4922]: E0929 23:07:46.426757 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:07:58 crc kubenswrapper[4922]: I0929 23:07:58.422051 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:07:58 crc kubenswrapper[4922]: E0929 23:07:58.423378 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:08:13 crc kubenswrapper[4922]: I0929 23:08:13.421774 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:08:13 crc kubenswrapper[4922]: E0929 23:08:13.422731 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:08:24 crc kubenswrapper[4922]: I0929 23:08:24.421762 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:08:24 crc kubenswrapper[4922]: E0929 23:08:24.424010 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:08:36 crc kubenswrapper[4922]: I0929 23:08:36.430195 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:08:36 crc kubenswrapper[4922]: E0929 23:08:36.431232 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:08:48 crc kubenswrapper[4922]: I0929 23:08:48.422574 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:08:48 crc kubenswrapper[4922]: E0929 23:08:48.423611 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:09:01 crc kubenswrapper[4922]: I0929 23:09:01.422434 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:09:01 crc kubenswrapper[4922]: E0929 23:09:01.423299 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:09:16 crc kubenswrapper[4922]: I0929 23:09:16.430481 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:09:16 crc kubenswrapper[4922]: E0929 23:09:16.431828 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:09:31 crc kubenswrapper[4922]: I0929 23:09:31.422478 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:09:31 crc kubenswrapper[4922]: E0929 23:09:31.423710 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:09:42 crc kubenswrapper[4922]: I0929 23:09:42.422193 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:09:42 crc kubenswrapper[4922]: E0929 23:09:42.423340 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:09:56 crc kubenswrapper[4922]: I0929 23:09:56.429771 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:09:56 crc kubenswrapper[4922]: E0929 23:09:56.431018 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:10:11 crc kubenswrapper[4922]: I0929 23:10:11.422067 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:10:11 crc kubenswrapper[4922]: E0929 23:10:11.423267 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:10:23 crc kubenswrapper[4922]: I0929 23:10:23.422628 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:10:23 crc kubenswrapper[4922]: E0929 23:10:23.423681 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:10:34 crc kubenswrapper[4922]: I0929 23:10:34.422739 4922 scope.go:117] "RemoveContainer" 
containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:10:34 crc kubenswrapper[4922]: E0929 23:10:34.423747 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.086460 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-z7nm7"] Sep 29 23:10:40 crc kubenswrapper[4922]: E0929 23:10:40.087639 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="extract-utilities" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.087662 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="extract-utilities" Sep 29 23:10:40 crc kubenswrapper[4922]: E0929 23:10:40.087689 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="registry-server" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.087703 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="registry-server" Sep 29 23:10:40 crc kubenswrapper[4922]: E0929 23:10:40.087728 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="extract-content" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.087740 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="extract-content" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.088025 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac2c8151-6c2e-4341-a5fd-0beb09dcba81" containerName="registry-server" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.090077 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.090813 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z7nm7"] Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.283741 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-utilities\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.284235 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb8ww\" (UniqueName: \"kubernetes.io/projected/08e3df2f-9fbe-43fd-8ce5-66078f50f007-kube-api-access-rb8ww\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.284515 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-catalog-content\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.385598 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb8ww\" (UniqueName: \"kubernetes.io/projected/08e3df2f-9fbe-43fd-8ce5-66078f50f007-kube-api-access-rb8ww\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.385690 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-catalog-content\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.385719 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-utilities\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.386912 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-utilities\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.386979 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-catalog-content\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.406677 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rb8ww\" (UniqueName: \"kubernetes.io/projected/08e3df2f-9fbe-43fd-8ce5-66078f50f007-kube-api-access-rb8ww\") pod \"redhat-marketplace-z7nm7\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.412177 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.830957 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z7nm7"] Sep 29 23:10:40 crc kubenswrapper[4922]: W0929 23:10:40.841918 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08e3df2f_9fbe_43fd_8ce5_66078f50f007.slice/crio-0332bcd95572a46b33ae62991b2077b42d34a83a21368f14305b00de5471d971 WatchSource:0}: Error finding container 0332bcd95572a46b33ae62991b2077b42d34a83a21368f14305b00de5471d971: Status 404 returned error can't find the container with id 0332bcd95572a46b33ae62991b2077b42d34a83a21368f14305b00de5471d971 Sep 29 23:10:40 crc kubenswrapper[4922]: I0929 23:10:40.953012 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z7nm7" event={"ID":"08e3df2f-9fbe-43fd-8ce5-66078f50f007","Type":"ContainerStarted","Data":"0332bcd95572a46b33ae62991b2077b42d34a83a21368f14305b00de5471d971"} Sep 29 23:10:41 crc kubenswrapper[4922]: I0929 23:10:41.964179 4922 generic.go:334] "Generic (PLEG): container finished" podID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerID="4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c" exitCode=0 Sep 29 23:10:41 crc kubenswrapper[4922]: I0929 23:10:41.964245 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z7nm7" event={"ID":"08e3df2f-9fbe-43fd-8ce5-66078f50f007","Type":"ContainerDied","Data":"4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c"} Sep 29 23:10:42 crc kubenswrapper[4922]: I0929 23:10:42.972364 4922 generic.go:334] "Generic (PLEG): container finished" podID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerID="784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910" exitCode=0 Sep 29 23:10:42 crc kubenswrapper[4922]: I0929 23:10:42.972537 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z7nm7" event={"ID":"08e3df2f-9fbe-43fd-8ce5-66078f50f007","Type":"ContainerDied","Data":"784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910"} Sep 29 23:10:43 crc kubenswrapper[4922]: I0929 23:10:43.985482 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z7nm7" event={"ID":"08e3df2f-9fbe-43fd-8ce5-66078f50f007","Type":"ContainerStarted","Data":"675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7"} Sep 29 23:10:44 crc kubenswrapper[4922]: I0929 23:10:44.015260 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-z7nm7" podStartSLOduration=2.535914886 podStartE2EDuration="4.015238827s" podCreationTimestamp="2025-09-29 23:10:40 +0000 UTC" firstStartedPulling="2025-09-29 23:10:41.967716665 +0000 UTC m=+2646.278005518" lastFinishedPulling="2025-09-29 23:10:43.447040616 +0000 UTC m=+2647.757329459" observedRunningTime="2025-09-29 23:10:44.015210646 +0000 UTC m=+2648.325499499" 
watchObservedRunningTime="2025-09-29 23:10:44.015238827 +0000 UTC m=+2648.325527660" Sep 29 23:10:47 crc kubenswrapper[4922]: I0929 23:10:47.423702 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:10:47 crc kubenswrapper[4922]: E0929 23:10:47.424453 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:10:50 crc kubenswrapper[4922]: I0929 23:10:50.413001 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:50 crc kubenswrapper[4922]: I0929 23:10:50.413468 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:50 crc kubenswrapper[4922]: I0929 23:10:50.494084 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:51 crc kubenswrapper[4922]: I0929 23:10:51.129842 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:51 crc kubenswrapper[4922]: I0929 23:10:51.199823 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z7nm7"] Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.076109 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-z7nm7" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="registry-server" containerID="cri-o://675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7" gracePeriod=2 Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.555847 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.718317 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-catalog-content\") pod \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.718487 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb8ww\" (UniqueName: \"kubernetes.io/projected/08e3df2f-9fbe-43fd-8ce5-66078f50f007-kube-api-access-rb8ww\") pod \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.718555 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-utilities\") pod \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\" (UID: \"08e3df2f-9fbe-43fd-8ce5-66078f50f007\") " Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.719954 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-utilities" (OuterVolumeSpecName: "utilities") pod "08e3df2f-9fbe-43fd-8ce5-66078f50f007" (UID: "08e3df2f-9fbe-43fd-8ce5-66078f50f007"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.720457 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.728255 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08e3df2f-9fbe-43fd-8ce5-66078f50f007-kube-api-access-rb8ww" (OuterVolumeSpecName: "kube-api-access-rb8ww") pod "08e3df2f-9fbe-43fd-8ce5-66078f50f007" (UID: "08e3df2f-9fbe-43fd-8ce5-66078f50f007"). InnerVolumeSpecName "kube-api-access-rb8ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.743978 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08e3df2f-9fbe-43fd-8ce5-66078f50f007" (UID: "08e3df2f-9fbe-43fd-8ce5-66078f50f007"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.821230 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08e3df2f-9fbe-43fd-8ce5-66078f50f007-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:10:53 crc kubenswrapper[4922]: I0929 23:10:53.821264 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb8ww\" (UniqueName: \"kubernetes.io/projected/08e3df2f-9fbe-43fd-8ce5-66078f50f007-kube-api-access-rb8ww\") on node \"crc\" DevicePath \"\"" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.089182 4922 generic.go:334] "Generic (PLEG): container finished" podID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerID="675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7" exitCode=0 Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.089259 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z7nm7" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.089260 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z7nm7" event={"ID":"08e3df2f-9fbe-43fd-8ce5-66078f50f007","Type":"ContainerDied","Data":"675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7"} Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.089338 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-z7nm7" event={"ID":"08e3df2f-9fbe-43fd-8ce5-66078f50f007","Type":"ContainerDied","Data":"0332bcd95572a46b33ae62991b2077b42d34a83a21368f14305b00de5471d971"} Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.089372 4922 scope.go:117] "RemoveContainer" containerID="675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.126859 4922 scope.go:117] "RemoveContainer" containerID="784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.148530 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-z7nm7"] Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.158796 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-z7nm7"] Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.174315 4922 scope.go:117] "RemoveContainer" containerID="4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.203988 4922 scope.go:117] "RemoveContainer" containerID="675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7" Sep 29 23:10:54 crc kubenswrapper[4922]: E0929 23:10:54.204713 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7\": container with ID starting with 675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7 not found: ID does not exist" containerID="675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.205008 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7"} err="failed to get container status 
\"675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7\": rpc error: code = NotFound desc = could not find container \"675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7\": container with ID starting with 675ae10a3504fd0d307341aa2b3414675340867f5d37266f591ea090d50f0fa7 not found: ID does not exist" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.205229 4922 scope.go:117] "RemoveContainer" containerID="784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910" Sep 29 23:10:54 crc kubenswrapper[4922]: E0929 23:10:54.206209 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910\": container with ID starting with 784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910 not found: ID does not exist" containerID="784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.206265 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910"} err="failed to get container status \"784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910\": rpc error: code = NotFound desc = could not find container \"784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910\": container with ID starting with 784205b651fd0a2f9563f1edd1fdfd29c10191f437915feb26697f7c0d57a910 not found: ID does not exist" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.206304 4922 scope.go:117] "RemoveContainer" containerID="4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c" Sep 29 23:10:54 crc kubenswrapper[4922]: E0929 23:10:54.206800 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c\": container with ID starting with 4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c not found: ID does not exist" containerID="4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.206849 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c"} err="failed to get container status \"4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c\": rpc error: code = NotFound desc = could not find container \"4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c\": container with ID starting with 4a94826b35e8baa97264196c19933ed9fccd24165873e4adbda3dd0d8f563c3c not found: ID does not exist" Sep 29 23:10:54 crc kubenswrapper[4922]: I0929 23:10:54.437446 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" path="/var/lib/kubelet/pods/08e3df2f-9fbe-43fd-8ce5-66078f50f007/volumes" Sep 29 23:11:01 crc kubenswrapper[4922]: I0929 23:11:01.421882 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:11:01 crc kubenswrapper[4922]: E0929 23:11:01.423190 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:11:15 crc kubenswrapper[4922]: I0929 23:11:15.421841 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:11:15 crc kubenswrapper[4922]: E0929 23:11:15.422904 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.588347 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dgcs4"] Sep 29 23:11:17 crc kubenswrapper[4922]: E0929 23:11:17.588890 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="registry-server" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.588914 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="registry-server" Sep 29 23:11:17 crc kubenswrapper[4922]: E0929 23:11:17.588943 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="extract-content" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.588957 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="extract-content" Sep 29 23:11:17 crc kubenswrapper[4922]: E0929 23:11:17.588976 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="extract-utilities" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.588991 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="extract-utilities" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.589270 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e3df2f-9fbe-43fd-8ce5-66078f50f007" containerName="registry-server" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.592139 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.616285 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dgcs4"] Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.741246 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gms7\" (UniqueName: \"kubernetes.io/projected/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-kube-api-access-5gms7\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.741310 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-catalog-content\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.741343 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-utilities\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.841984 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gms7\" (UniqueName: \"kubernetes.io/projected/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-kube-api-access-5gms7\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.842596 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-catalog-content\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.842717 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-utilities\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.843096 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-catalog-content\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.843243 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-utilities\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.861590 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5gms7\" (UniqueName: \"kubernetes.io/projected/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-kube-api-access-5gms7\") pod \"redhat-operators-dgcs4\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:17 crc kubenswrapper[4922]: I0929 23:11:17.944727 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:18 crc kubenswrapper[4922]: I0929 23:11:18.378736 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dgcs4"] Sep 29 23:11:19 crc kubenswrapper[4922]: I0929 23:11:19.334536 4922 generic.go:334] "Generic (PLEG): container finished" podID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerID="e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637" exitCode=0 Sep 29 23:11:19 crc kubenswrapper[4922]: I0929 23:11:19.334669 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dgcs4" event={"ID":"9f5c686b-ba7a-4677-8d22-f44d0fa23e64","Type":"ContainerDied","Data":"e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637"} Sep 29 23:11:19 crc kubenswrapper[4922]: I0929 23:11:19.334722 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dgcs4" event={"ID":"9f5c686b-ba7a-4677-8d22-f44d0fa23e64","Type":"ContainerStarted","Data":"cf523a27931f15f7d1ee2e6118a1fd081cc1736108edd817496b9f24ae17f9a0"} Sep 29 23:11:19 crc kubenswrapper[4922]: I0929 23:11:19.337095 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 23:11:20 crc kubenswrapper[4922]: I0929 23:11:20.344340 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dgcs4" event={"ID":"9f5c686b-ba7a-4677-8d22-f44d0fa23e64","Type":"ContainerStarted","Data":"04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d"} Sep 29 23:11:21 crc kubenswrapper[4922]: I0929 23:11:21.357751 4922 generic.go:334] "Generic (PLEG): container finished" podID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerID="04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d" exitCode=0 Sep 29 23:11:21 crc kubenswrapper[4922]: I0929 23:11:21.357816 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dgcs4" event={"ID":"9f5c686b-ba7a-4677-8d22-f44d0fa23e64","Type":"ContainerDied","Data":"04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d"} Sep 29 23:11:22 crc kubenswrapper[4922]: I0929 23:11:22.368335 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dgcs4" event={"ID":"9f5c686b-ba7a-4677-8d22-f44d0fa23e64","Type":"ContainerStarted","Data":"a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8"} Sep 29 23:11:22 crc kubenswrapper[4922]: I0929 23:11:22.400792 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dgcs4" podStartSLOduration=2.906772555 podStartE2EDuration="5.400762788s" podCreationTimestamp="2025-09-29 23:11:17 +0000 UTC" firstStartedPulling="2025-09-29 23:11:19.336630426 +0000 UTC m=+2683.646919269" lastFinishedPulling="2025-09-29 23:11:21.830620679 +0000 UTC m=+2686.140909502" observedRunningTime="2025-09-29 23:11:22.395828866 +0000 UTC m=+2686.706117679" watchObservedRunningTime="2025-09-29 23:11:22.400762788 +0000 UTC m=+2686.711051631" Sep 29 23:11:26 crc 
kubenswrapper[4922]: I0929 23:11:26.428691 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:11:26 crc kubenswrapper[4922]: E0929 23:11:26.429060 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:11:27 crc kubenswrapper[4922]: I0929 23:11:27.945797 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:27 crc kubenswrapper[4922]: I0929 23:11:27.946310 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:29 crc kubenswrapper[4922]: I0929 23:11:29.023549 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dgcs4" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="registry-server" probeResult="failure" output=< Sep 29 23:11:29 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 29 23:11:29 crc kubenswrapper[4922]: > Sep 29 23:11:38 crc kubenswrapper[4922]: I0929 23:11:38.023518 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:38 crc kubenswrapper[4922]: I0929 23:11:38.077850 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:38 crc kubenswrapper[4922]: I0929 23:11:38.264636 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dgcs4"] Sep 29 23:11:39 crc kubenswrapper[4922]: I0929 23:11:39.535997 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dgcs4" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="registry-server" containerID="cri-o://a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8" gracePeriod=2 Sep 29 23:11:39 crc kubenswrapper[4922]: I0929 23:11:39.988014 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.110289 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gms7\" (UniqueName: \"kubernetes.io/projected/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-kube-api-access-5gms7\") pod \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.110381 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-utilities\") pod \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.110439 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-catalog-content\") pod \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\" (UID: \"9f5c686b-ba7a-4677-8d22-f44d0fa23e64\") " Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.111657 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-utilities" (OuterVolumeSpecName: "utilities") pod "9f5c686b-ba7a-4677-8d22-f44d0fa23e64" (UID: "9f5c686b-ba7a-4677-8d22-f44d0fa23e64"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.118508 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-kube-api-access-5gms7" (OuterVolumeSpecName: "kube-api-access-5gms7") pod "9f5c686b-ba7a-4677-8d22-f44d0fa23e64" (UID: "9f5c686b-ba7a-4677-8d22-f44d0fa23e64"). InnerVolumeSpecName "kube-api-access-5gms7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.209959 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f5c686b-ba7a-4677-8d22-f44d0fa23e64" (UID: "9f5c686b-ba7a-4677-8d22-f44d0fa23e64"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.212612 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gms7\" (UniqueName: \"kubernetes.io/projected/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-kube-api-access-5gms7\") on node \"crc\" DevicePath \"\"" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.212650 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.212664 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f5c686b-ba7a-4677-8d22-f44d0fa23e64-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.421968 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:11:40 crc kubenswrapper[4922]: E0929 23:11:40.514266 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f5c686b_ba7a_4677_8d22_f44d0fa23e64.slice/crio-cf523a27931f15f7d1ee2e6118a1fd081cc1736108edd817496b9f24ae17f9a0\": RecentStats: unable to find data in memory cache]" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.543556 4922 generic.go:334] "Generic (PLEG): container finished" podID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerID="a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8" exitCode=0 Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.543605 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dgcs4" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.543644 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dgcs4" event={"ID":"9f5c686b-ba7a-4677-8d22-f44d0fa23e64","Type":"ContainerDied","Data":"a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8"} Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.544931 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dgcs4" event={"ID":"9f5c686b-ba7a-4677-8d22-f44d0fa23e64","Type":"ContainerDied","Data":"cf523a27931f15f7d1ee2e6118a1fd081cc1736108edd817496b9f24ae17f9a0"} Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.544964 4922 scope.go:117] "RemoveContainer" containerID="a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.567740 4922 scope.go:117] "RemoveContainer" containerID="04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.569061 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dgcs4"] Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.574160 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dgcs4"] Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.587669 4922 scope.go:117] "RemoveContainer" containerID="e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.615470 4922 scope.go:117] "RemoveContainer" containerID="a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8" Sep 29 23:11:40 crc kubenswrapper[4922]: E0929 23:11:40.615955 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8\": container with ID starting with a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8 not found: ID does not exist" containerID="a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.616049 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8"} err="failed to get container status \"a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8\": rpc error: code = NotFound desc = could not find container \"a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8\": container with ID starting with a76f9e57add778e7bfdd316290e0a5c8588f5d36805036b10b50d0701bd61df8 not found: ID does not exist" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.616087 4922 scope.go:117] "RemoveContainer" containerID="04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d" Sep 29 23:11:40 crc kubenswrapper[4922]: E0929 23:11:40.616499 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d\": container with ID starting with 04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d not found: ID does not exist" containerID="04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.616593 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d"} err="failed to get container status \"04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d\": rpc error: code = NotFound desc = could not find container \"04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d\": container with ID starting with 04848ac452cb03c4ec12e3ee6d522aa68919e63468c72e80d58626a72fc7132d not found: ID does not exist" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.616670 4922 scope.go:117] "RemoveContainer" containerID="e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637" Sep 29 23:11:40 crc kubenswrapper[4922]: E0929 23:11:40.616991 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637\": container with ID starting with e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637 not found: ID does not exist" containerID="e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637" Sep 29 23:11:40 crc kubenswrapper[4922]: I0929 23:11:40.617041 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637"} err="failed to get container status \"e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637\": rpc error: code = NotFound desc = could not find container \"e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637\": container with ID starting with e665cfcae6a2b686782153170a910b999a8e2c3cc40272df9b4e569828f95637 not found: ID does not exist" Sep 29 23:11:41 crc kubenswrapper[4922]: I0929 23:11:41.557156 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"a6ce5f0541e057a33539d85b55e25a27b67c02e60a8c2b6b49ba8c4ff2d1c52e"} Sep 29 23:11:42 crc kubenswrapper[4922]: I0929 23:11:42.432943 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" path="/var/lib/kubelet/pods/9f5c686b-ba7a-4677-8d22-f44d0fa23e64/volumes" Sep 29 23:13:58 crc kubenswrapper[4922]: I0929 23:13:58.913478 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:13:58 crc kubenswrapper[4922]: I0929 23:13:58.914253 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:14:28 crc kubenswrapper[4922]: I0929 23:14:28.913310 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:14:28 crc kubenswrapper[4922]: I0929 23:14:28.914016 4922 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:14:58 crc kubenswrapper[4922]: I0929 23:14:58.912903 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:14:58 crc kubenswrapper[4922]: I0929 23:14:58.914046 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:14:58 crc kubenswrapper[4922]: I0929 23:14:58.914109 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:14:58 crc kubenswrapper[4922]: I0929 23:14:58.914941 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6ce5f0541e057a33539d85b55e25a27b67c02e60a8c2b6b49ba8c4ff2d1c52e"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:14:58 crc kubenswrapper[4922]: I0929 23:14:58.915040 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://a6ce5f0541e057a33539d85b55e25a27b67c02e60a8c2b6b49ba8c4ff2d1c52e" gracePeriod=600 Sep 29 23:14:59 crc kubenswrapper[4922]: I0929 23:14:59.412914 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="a6ce5f0541e057a33539d85b55e25a27b67c02e60a8c2b6b49ba8c4ff2d1c52e" exitCode=0 Sep 29 23:14:59 crc kubenswrapper[4922]: I0929 23:14:59.413007 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"a6ce5f0541e057a33539d85b55e25a27b67c02e60a8c2b6b49ba8c4ff2d1c52e"} Sep 29 23:14:59 crc kubenswrapper[4922]: I0929 23:14:59.413472 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26"} Sep 29 23:14:59 crc kubenswrapper[4922]: I0929 23:14:59.413519 4922 scope.go:117] "RemoveContainer" containerID="d1e6b205f579e6a8e0a5276f3980ad859da2b33ef001fb6a4540cd41271651ab" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.173899 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84"] Sep 29 23:15:00 crc kubenswrapper[4922]: E0929 23:15:00.174622 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" 
containerName="registry-server" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.174644 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="registry-server" Sep 29 23:15:00 crc kubenswrapper[4922]: E0929 23:15:00.174673 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="extract-utilities" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.174683 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="extract-utilities" Sep 29 23:15:00 crc kubenswrapper[4922]: E0929 23:15:00.174703 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="extract-content" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.174713 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="extract-content" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.174923 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f5c686b-ba7a-4677-8d22-f44d0fa23e64" containerName="registry-server" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.175636 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.178629 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.178878 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.180280 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84"] Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.307051 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/831d7d8a-3184-430f-990a-837d8f5437db-config-volume\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.307144 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zw5w\" (UniqueName: \"kubernetes.io/projected/831d7d8a-3184-430f-990a-837d8f5437db-kube-api-access-5zw5w\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.307191 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/831d7d8a-3184-430f-990a-837d8f5437db-secret-volume\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.408503 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zw5w\" (UniqueName: 
\"kubernetes.io/projected/831d7d8a-3184-430f-990a-837d8f5437db-kube-api-access-5zw5w\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.408594 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/831d7d8a-3184-430f-990a-837d8f5437db-secret-volume\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.408790 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/831d7d8a-3184-430f-990a-837d8f5437db-config-volume\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.410314 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/831d7d8a-3184-430f-990a-837d8f5437db-config-volume\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.419504 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/831d7d8a-3184-430f-990a-837d8f5437db-secret-volume\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.434171 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zw5w\" (UniqueName: \"kubernetes.io/projected/831d7d8a-3184-430f-990a-837d8f5437db-kube-api-access-5zw5w\") pod \"collect-profiles-29319795-48l84\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.519136 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:00 crc kubenswrapper[4922]: I0929 23:15:00.979953 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84"] Sep 29 23:15:01 crc kubenswrapper[4922]: I0929 23:15:01.438135 4922 generic.go:334] "Generic (PLEG): container finished" podID="831d7d8a-3184-430f-990a-837d8f5437db" containerID="9ab728a18028a9fa5fa11afe8d3a92d7241ea9d5ba0696fd63bcdf4c26f2ec5a" exitCode=0 Sep 29 23:15:01 crc kubenswrapper[4922]: I0929 23:15:01.438220 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" event={"ID":"831d7d8a-3184-430f-990a-837d8f5437db","Type":"ContainerDied","Data":"9ab728a18028a9fa5fa11afe8d3a92d7241ea9d5ba0696fd63bcdf4c26f2ec5a"} Sep 29 23:15:01 crc kubenswrapper[4922]: I0929 23:15:01.438327 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" event={"ID":"831d7d8a-3184-430f-990a-837d8f5437db","Type":"ContainerStarted","Data":"015e7384b72dfca144a7cca2fbe6b5a0f8cc5670f2058949cc39c4a18ecdecdb"} Sep 29 23:15:02 crc kubenswrapper[4922]: I0929 23:15:02.825317 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:02 crc kubenswrapper[4922]: I0929 23:15:02.969033 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/831d7d8a-3184-430f-990a-837d8f5437db-secret-volume\") pod \"831d7d8a-3184-430f-990a-837d8f5437db\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " Sep 29 23:15:02 crc kubenswrapper[4922]: I0929 23:15:02.969215 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zw5w\" (UniqueName: \"kubernetes.io/projected/831d7d8a-3184-430f-990a-837d8f5437db-kube-api-access-5zw5w\") pod \"831d7d8a-3184-430f-990a-837d8f5437db\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " Sep 29 23:15:02 crc kubenswrapper[4922]: I0929 23:15:02.969333 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/831d7d8a-3184-430f-990a-837d8f5437db-config-volume\") pod \"831d7d8a-3184-430f-990a-837d8f5437db\" (UID: \"831d7d8a-3184-430f-990a-837d8f5437db\") " Sep 29 23:15:02 crc kubenswrapper[4922]: I0929 23:15:02.970719 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/831d7d8a-3184-430f-990a-837d8f5437db-config-volume" (OuterVolumeSpecName: "config-volume") pod "831d7d8a-3184-430f-990a-837d8f5437db" (UID: "831d7d8a-3184-430f-990a-837d8f5437db"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:15:02 crc kubenswrapper[4922]: I0929 23:15:02.977561 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831d7d8a-3184-430f-990a-837d8f5437db-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "831d7d8a-3184-430f-990a-837d8f5437db" (UID: "831d7d8a-3184-430f-990a-837d8f5437db"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:15:02 crc kubenswrapper[4922]: I0929 23:15:02.977659 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831d7d8a-3184-430f-990a-837d8f5437db-kube-api-access-5zw5w" (OuterVolumeSpecName: "kube-api-access-5zw5w") pod "831d7d8a-3184-430f-990a-837d8f5437db" (UID: "831d7d8a-3184-430f-990a-837d8f5437db"). InnerVolumeSpecName "kube-api-access-5zw5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.071187 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zw5w\" (UniqueName: \"kubernetes.io/projected/831d7d8a-3184-430f-990a-837d8f5437db-kube-api-access-5zw5w\") on node \"crc\" DevicePath \"\"" Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.071227 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/831d7d8a-3184-430f-990a-837d8f5437db-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.071330 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/831d7d8a-3184-430f-990a-837d8f5437db-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.457498 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" event={"ID":"831d7d8a-3184-430f-990a-837d8f5437db","Type":"ContainerDied","Data":"015e7384b72dfca144a7cca2fbe6b5a0f8cc5670f2058949cc39c4a18ecdecdb"} Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.457551 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="015e7384b72dfca144a7cca2fbe6b5a0f8cc5670f2058949cc39c4a18ecdecdb" Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.457608 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84" Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.922943 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4"] Sep 29 23:15:03 crc kubenswrapper[4922]: I0929 23:15:03.930184 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319750-gtdv4"] Sep 29 23:15:04 crc kubenswrapper[4922]: I0929 23:15:04.433971 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f84b5d8-c622-4a85-b8da-97861b7ede3f" path="/var/lib/kubelet/pods/8f84b5d8-c622-4a85-b8da-97861b7ede3f/volumes" Sep 29 23:15:50 crc kubenswrapper[4922]: I0929 23:15:50.682986 4922 scope.go:117] "RemoveContainer" containerID="dde905e519db2042b26f09f3a5db1b2366fb264932c3902c5bbadc4a1c1712de" Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.931679 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4d6fn"] Sep 29 23:16:02 crc kubenswrapper[4922]: E0929 23:16:02.932884 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831d7d8a-3184-430f-990a-837d8f5437db" containerName="collect-profiles" Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.932906 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="831d7d8a-3184-430f-990a-837d8f5437db" containerName="collect-profiles" Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.933143 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="831d7d8a-3184-430f-990a-837d8f5437db" containerName="collect-profiles" Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.934925 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.953735 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4d6fn"] Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.983596 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-catalog-content\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.983649 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-utilities\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:02 crc kubenswrapper[4922]: I0929 23:16:02.983712 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjmsg\" (UniqueName: \"kubernetes.io/projected/14a02843-b147-4974-93dc-3b585c0949e7-kube-api-access-vjmsg\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.085165 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-catalog-content\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.085250 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-utilities\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.085346 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjmsg\" (UniqueName: \"kubernetes.io/projected/14a02843-b147-4974-93dc-3b585c0949e7-kube-api-access-vjmsg\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.085787 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-catalog-content\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.085924 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-utilities\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.110705 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vjmsg\" (UniqueName: \"kubernetes.io/projected/14a02843-b147-4974-93dc-3b585c0949e7-kube-api-access-vjmsg\") pod \"certified-operators-4d6fn\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.268087 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:03 crc kubenswrapper[4922]: I0929 23:16:03.752822 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4d6fn"] Sep 29 23:16:04 crc kubenswrapper[4922]: I0929 23:16:04.039567 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4d6fn" event={"ID":"14a02843-b147-4974-93dc-3b585c0949e7","Type":"ContainerDied","Data":"ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e"} Sep 29 23:16:04 crc kubenswrapper[4922]: I0929 23:16:04.039620 4922 generic.go:334] "Generic (PLEG): container finished" podID="14a02843-b147-4974-93dc-3b585c0949e7" containerID="ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e" exitCode=0 Sep 29 23:16:04 crc kubenswrapper[4922]: I0929 23:16:04.039988 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4d6fn" event={"ID":"14a02843-b147-4974-93dc-3b585c0949e7","Type":"ContainerStarted","Data":"f24f10dff8ff3693ee4b372a6e3cc9f39d8455479ee529d781a4412c8eec45a6"} Sep 29 23:16:05 crc kubenswrapper[4922]: I0929 23:16:05.053050 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4d6fn" event={"ID":"14a02843-b147-4974-93dc-3b585c0949e7","Type":"ContainerStarted","Data":"18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e"} Sep 29 23:16:06 crc kubenswrapper[4922]: I0929 23:16:06.078297 4922 generic.go:334] "Generic (PLEG): container finished" podID="14a02843-b147-4974-93dc-3b585c0949e7" containerID="18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e" exitCode=0 Sep 29 23:16:06 crc kubenswrapper[4922]: I0929 23:16:06.079444 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4d6fn" event={"ID":"14a02843-b147-4974-93dc-3b585c0949e7","Type":"ContainerDied","Data":"18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e"} Sep 29 23:16:07 crc kubenswrapper[4922]: I0929 23:16:07.091996 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4d6fn" event={"ID":"14a02843-b147-4974-93dc-3b585c0949e7","Type":"ContainerStarted","Data":"6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20"} Sep 29 23:16:07 crc kubenswrapper[4922]: I0929 23:16:07.118449 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4d6fn" podStartSLOduration=2.604586689 podStartE2EDuration="5.11837792s" podCreationTimestamp="2025-09-29 23:16:02 +0000 UTC" firstStartedPulling="2025-09-29 23:16:04.042155423 +0000 UTC m=+2968.352444256" lastFinishedPulling="2025-09-29 23:16:06.555946674 +0000 UTC m=+2970.866235487" observedRunningTime="2025-09-29 23:16:07.112468054 +0000 UTC m=+2971.422756907" watchObservedRunningTime="2025-09-29 23:16:07.11837792 +0000 UTC m=+2971.428666763" Sep 29 23:16:13 crc kubenswrapper[4922]: I0929 23:16:13.268890 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:13 crc kubenswrapper[4922]: I0929 23:16:13.269794 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:13 crc kubenswrapper[4922]: I0929 23:16:13.347892 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:14 crc kubenswrapper[4922]: I0929 23:16:14.241112 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:14 crc kubenswrapper[4922]: I0929 23:16:14.296210 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4d6fn"] Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.181759 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4d6fn" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="registry-server" containerID="cri-o://6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20" gracePeriod=2 Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.741214 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.901683 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-catalog-content\") pod \"14a02843-b147-4974-93dc-3b585c0949e7\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.901806 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjmsg\" (UniqueName: \"kubernetes.io/projected/14a02843-b147-4974-93dc-3b585c0949e7-kube-api-access-vjmsg\") pod \"14a02843-b147-4974-93dc-3b585c0949e7\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.901910 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-utilities\") pod \"14a02843-b147-4974-93dc-3b585c0949e7\" (UID: \"14a02843-b147-4974-93dc-3b585c0949e7\") " Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.904019 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-utilities" (OuterVolumeSpecName: "utilities") pod "14a02843-b147-4974-93dc-3b585c0949e7" (UID: "14a02843-b147-4974-93dc-3b585c0949e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.912718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a02843-b147-4974-93dc-3b585c0949e7-kube-api-access-vjmsg" (OuterVolumeSpecName: "kube-api-access-vjmsg") pod "14a02843-b147-4974-93dc-3b585c0949e7" (UID: "14a02843-b147-4974-93dc-3b585c0949e7"). InnerVolumeSpecName "kube-api-access-vjmsg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:16:16 crc kubenswrapper[4922]: I0929 23:16:16.957884 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14a02843-b147-4974-93dc-3b585c0949e7" (UID: "14a02843-b147-4974-93dc-3b585c0949e7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.004205 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjmsg\" (UniqueName: \"kubernetes.io/projected/14a02843-b147-4974-93dc-3b585c0949e7-kube-api-access-vjmsg\") on node \"crc\" DevicePath \"\"" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.004536 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.004614 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a02843-b147-4974-93dc-3b585c0949e7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.196538 4922 generic.go:334] "Generic (PLEG): container finished" podID="14a02843-b147-4974-93dc-3b585c0949e7" containerID="6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20" exitCode=0 Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.196616 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4d6fn" event={"ID":"14a02843-b147-4974-93dc-3b585c0949e7","Type":"ContainerDied","Data":"6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20"} Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.196660 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4d6fn" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.196679 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4d6fn" event={"ID":"14a02843-b147-4974-93dc-3b585c0949e7","Type":"ContainerDied","Data":"f24f10dff8ff3693ee4b372a6e3cc9f39d8455479ee529d781a4412c8eec45a6"} Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.196716 4922 scope.go:117] "RemoveContainer" containerID="6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.228281 4922 scope.go:117] "RemoveContainer" containerID="18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.258043 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4d6fn"] Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.268956 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4d6fn"] Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.277059 4922 scope.go:117] "RemoveContainer" containerID="ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.315144 4922 scope.go:117] "RemoveContainer" containerID="6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20" Sep 29 23:16:17 crc kubenswrapper[4922]: E0929 23:16:17.315599 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20\": container with ID starting with 6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20 not found: ID does not exist" containerID="6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.315785 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20"} err="failed to get container status \"6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20\": rpc error: code = NotFound desc = could not find container \"6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20\": container with ID starting with 6e57746792d233117504d56be3b6d53f2745e44c7e6c771ef19c226dbbea3c20 not found: ID does not exist" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.316092 4922 scope.go:117] "RemoveContainer" containerID="18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e" Sep 29 23:16:17 crc kubenswrapper[4922]: E0929 23:16:17.316704 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e\": container with ID starting with 18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e not found: ID does not exist" containerID="18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.316767 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e"} err="failed to get container status \"18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e\": rpc error: code = NotFound desc = could not find 
container \"18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e\": container with ID starting with 18292d47b7b86960c9eb8518452cef1a32809df9be32d606508d1d53f9d7e62e not found: ID does not exist" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.316824 4922 scope.go:117] "RemoveContainer" containerID="ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e" Sep 29 23:16:17 crc kubenswrapper[4922]: E0929 23:16:17.317342 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e\": container with ID starting with ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e not found: ID does not exist" containerID="ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e" Sep 29 23:16:17 crc kubenswrapper[4922]: I0929 23:16:17.317372 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e"} err="failed to get container status \"ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e\": rpc error: code = NotFound desc = could not find container \"ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e\": container with ID starting with ad49fce6b2b4f4bfba3c6a331458ac9ea9b4c2c11ce9f46b149c51fab6e3b93e not found: ID does not exist" Sep 29 23:16:18 crc kubenswrapper[4922]: I0929 23:16:18.444546 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a02843-b147-4974-93dc-3b585c0949e7" path="/var/lib/kubelet/pods/14a02843-b147-4974-93dc-3b585c0949e7/volumes" Sep 29 23:17:28 crc kubenswrapper[4922]: I0929 23:17:28.912984 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:17:28 crc kubenswrapper[4922]: I0929 23:17:28.913917 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:17:58 crc kubenswrapper[4922]: I0929 23:17:58.912501 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:17:58 crc kubenswrapper[4922]: I0929 23:17:58.913269 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:18:28 crc kubenswrapper[4922]: I0929 23:18:28.913055 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 
23:18:28 crc kubenswrapper[4922]: I0929 23:18:28.913808 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:18:28 crc kubenswrapper[4922]: I0929 23:18:28.913875 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:18:28 crc kubenswrapper[4922]: I0929 23:18:28.914643 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:18:28 crc kubenswrapper[4922]: I0929 23:18:28.914743 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" gracePeriod=600 Sep 29 23:18:29 crc kubenswrapper[4922]: E0929 23:18:29.056307 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:18:29 crc kubenswrapper[4922]: I0929 23:18:29.450527 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" exitCode=0 Sep 29 23:18:29 crc kubenswrapper[4922]: I0929 23:18:29.450578 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26"} Sep 29 23:18:29 crc kubenswrapper[4922]: I0929 23:18:29.450621 4922 scope.go:117] "RemoveContainer" containerID="a6ce5f0541e057a33539d85b55e25a27b67c02e60a8c2b6b49ba8c4ff2d1c52e" Sep 29 23:18:29 crc kubenswrapper[4922]: I0929 23:18:29.451263 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:18:29 crc kubenswrapper[4922]: E0929 23:18:29.451850 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:18:40 crc kubenswrapper[4922]: I0929 23:18:40.422952 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:18:40 crc 
kubenswrapper[4922]: E0929 23:18:40.423918 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:18:53 crc kubenswrapper[4922]: I0929 23:18:53.422795 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:18:53 crc kubenswrapper[4922]: E0929 23:18:53.423900 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:19:05 crc kubenswrapper[4922]: I0929 23:19:05.421994 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:19:05 crc kubenswrapper[4922]: E0929 23:19:05.423044 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:19:18 crc kubenswrapper[4922]: I0929 23:19:18.423468 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:19:18 crc kubenswrapper[4922]: E0929 23:19:18.424706 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:19:33 crc kubenswrapper[4922]: I0929 23:19:33.422647 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:19:33 crc kubenswrapper[4922]: E0929 23:19:33.423675 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:19:44 crc kubenswrapper[4922]: I0929 23:19:44.422658 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:19:44 crc kubenswrapper[4922]: E0929 23:19:44.424048 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:19:55 crc kubenswrapper[4922]: I0929 23:19:55.422688 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:19:55 crc kubenswrapper[4922]: E0929 23:19:55.423635 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:20:08 crc kubenswrapper[4922]: I0929 23:20:08.422420 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:20:08 crc kubenswrapper[4922]: E0929 23:20:08.423711 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:20:19 crc kubenswrapper[4922]: I0929 23:20:19.422682 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:20:19 crc kubenswrapper[4922]: E0929 23:20:19.423596 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:20:34 crc kubenswrapper[4922]: I0929 23:20:34.422039 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:20:34 crc kubenswrapper[4922]: E0929 23:20:34.422960 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:20:45 crc kubenswrapper[4922]: I0929 23:20:45.421902 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:20:45 crc kubenswrapper[4922]: E0929 23:20:45.422734 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:20:57 crc kubenswrapper[4922]: I0929 23:20:57.422847 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:20:57 crc kubenswrapper[4922]: E0929 23:20:57.423950 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:21:11 crc kubenswrapper[4922]: I0929 23:21:11.422149 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:21:11 crc kubenswrapper[4922]: E0929 23:21:11.423167 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:21:23 crc kubenswrapper[4922]: I0929 23:21:23.423500 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:21:23 crc kubenswrapper[4922]: E0929 23:21:23.424640 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:21:37 crc kubenswrapper[4922]: I0929 23:21:37.422064 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:21:37 crc kubenswrapper[4922]: E0929 23:21:37.423055 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:21:48 crc kubenswrapper[4922]: I0929 23:21:48.422003 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:21:48 crc kubenswrapper[4922]: E0929 23:21:48.423150 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:21:59 crc kubenswrapper[4922]: I0929 23:21:59.421993 4922 
scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:21:59 crc kubenswrapper[4922]: E0929 23:21:59.422978 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.728816 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-77m65"] Sep 29 23:22:00 crc kubenswrapper[4922]: E0929 23:22:00.729591 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="extract-content" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.729625 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="extract-content" Sep 29 23:22:00 crc kubenswrapper[4922]: E0929 23:22:00.729658 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="registry-server" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.729676 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="registry-server" Sep 29 23:22:00 crc kubenswrapper[4922]: E0929 23:22:00.729707 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="extract-utilities" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.729725 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="extract-utilities" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.730104 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a02843-b147-4974-93dc-3b585c0949e7" containerName="registry-server" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.732465 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.764178 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-77m65"] Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.862928 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-catalog-content\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.862977 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-utilities\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.863025 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzx7x\" (UniqueName: \"kubernetes.io/projected/f886b656-439a-4871-b2aa-f3af5439c631-kube-api-access-pzx7x\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.964230 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-catalog-content\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.964296 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-utilities\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.964352 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzx7x\" (UniqueName: \"kubernetes.io/projected/f886b656-439a-4871-b2aa-f3af5439c631-kube-api-access-pzx7x\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.964869 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-utilities\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.965168 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-catalog-content\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:00 crc kubenswrapper[4922]: I0929 23:22:00.990308 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-pzx7x\" (UniqueName: \"kubernetes.io/projected/f886b656-439a-4871-b2aa-f3af5439c631-kube-api-access-pzx7x\") pod \"redhat-marketplace-77m65\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:01 crc kubenswrapper[4922]: I0929 23:22:01.109271 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:01 crc kubenswrapper[4922]: I0929 23:22:01.357805 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-77m65"] Sep 29 23:22:01 crc kubenswrapper[4922]: I0929 23:22:01.440874 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-77m65" event={"ID":"f886b656-439a-4871-b2aa-f3af5439c631","Type":"ContainerStarted","Data":"e04343bc51ae795ec92d308834033d1b04492607f8305e8935592c2aafc91dec"} Sep 29 23:22:02 crc kubenswrapper[4922]: I0929 23:22:02.451067 4922 generic.go:334] "Generic (PLEG): container finished" podID="f886b656-439a-4871-b2aa-f3af5439c631" containerID="d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4" exitCode=0 Sep 29 23:22:02 crc kubenswrapper[4922]: I0929 23:22:02.451304 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-77m65" event={"ID":"f886b656-439a-4871-b2aa-f3af5439c631","Type":"ContainerDied","Data":"d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4"} Sep 29 23:22:02 crc kubenswrapper[4922]: I0929 23:22:02.455133 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 23:22:03 crc kubenswrapper[4922]: I0929 23:22:03.462976 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-77m65" event={"ID":"f886b656-439a-4871-b2aa-f3af5439c631","Type":"ContainerStarted","Data":"2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a"} Sep 29 23:22:04 crc kubenswrapper[4922]: I0929 23:22:04.477362 4922 generic.go:334] "Generic (PLEG): container finished" podID="f886b656-439a-4871-b2aa-f3af5439c631" containerID="2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a" exitCode=0 Sep 29 23:22:04 crc kubenswrapper[4922]: I0929 23:22:04.477448 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-77m65" event={"ID":"f886b656-439a-4871-b2aa-f3af5439c631","Type":"ContainerDied","Data":"2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a"} Sep 29 23:22:05 crc kubenswrapper[4922]: I0929 23:22:05.487812 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-77m65" event={"ID":"f886b656-439a-4871-b2aa-f3af5439c631","Type":"ContainerStarted","Data":"b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a"} Sep 29 23:22:10 crc kubenswrapper[4922]: I0929 23:22:10.421687 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:22:10 crc kubenswrapper[4922]: E0929 23:22:10.422438 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:22:11 crc kubenswrapper[4922]: I0929 23:22:11.110280 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:11 crc kubenswrapper[4922]: I0929 23:22:11.110722 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:11 crc kubenswrapper[4922]: I0929 23:22:11.186122 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:11 crc kubenswrapper[4922]: I0929 23:22:11.217792 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-77m65" podStartSLOduration=8.816264425 podStartE2EDuration="11.217758742s" podCreationTimestamp="2025-09-29 23:22:00 +0000 UTC" firstStartedPulling="2025-09-29 23:22:02.454947241 +0000 UTC m=+3326.765236054" lastFinishedPulling="2025-09-29 23:22:04.856441548 +0000 UTC m=+3329.166730371" observedRunningTime="2025-09-29 23:22:05.507230771 +0000 UTC m=+3329.817519594" watchObservedRunningTime="2025-09-29 23:22:11.217758742 +0000 UTC m=+3335.528047585" Sep 29 23:22:11 crc kubenswrapper[4922]: I0929 23:22:11.610741 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:11 crc kubenswrapper[4922]: I0929 23:22:11.671209 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-77m65"] Sep 29 23:22:13 crc kubenswrapper[4922]: I0929 23:22:13.555076 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-77m65" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="registry-server" containerID="cri-o://b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a" gracePeriod=2 Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.020982 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.073070 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-utilities\") pod \"f886b656-439a-4871-b2aa-f3af5439c631\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.073245 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-catalog-content\") pod \"f886b656-439a-4871-b2aa-f3af5439c631\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.074110 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-utilities" (OuterVolumeSpecName: "utilities") pod "f886b656-439a-4871-b2aa-f3af5439c631" (UID: "f886b656-439a-4871-b2aa-f3af5439c631"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.074668 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzx7x\" (UniqueName: \"kubernetes.io/projected/f886b656-439a-4871-b2aa-f3af5439c631-kube-api-access-pzx7x\") pod \"f886b656-439a-4871-b2aa-f3af5439c631\" (UID: \"f886b656-439a-4871-b2aa-f3af5439c631\") " Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.075385 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.084691 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f886b656-439a-4871-b2aa-f3af5439c631-kube-api-access-pzx7x" (OuterVolumeSpecName: "kube-api-access-pzx7x") pod "f886b656-439a-4871-b2aa-f3af5439c631" (UID: "f886b656-439a-4871-b2aa-f3af5439c631"). InnerVolumeSpecName "kube-api-access-pzx7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.098114 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f886b656-439a-4871-b2aa-f3af5439c631" (UID: "f886b656-439a-4871-b2aa-f3af5439c631"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.176827 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f886b656-439a-4871-b2aa-f3af5439c631-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.176869 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzx7x\" (UniqueName: \"kubernetes.io/projected/f886b656-439a-4871-b2aa-f3af5439c631-kube-api-access-pzx7x\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.568888 4922 generic.go:334] "Generic (PLEG): container finished" podID="f886b656-439a-4871-b2aa-f3af5439c631" containerID="b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a" exitCode=0 Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.568957 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-77m65" event={"ID":"f886b656-439a-4871-b2aa-f3af5439c631","Type":"ContainerDied","Data":"b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a"} Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.569525 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-77m65" event={"ID":"f886b656-439a-4871-b2aa-f3af5439c631","Type":"ContainerDied","Data":"e04343bc51ae795ec92d308834033d1b04492607f8305e8935592c2aafc91dec"} Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.569573 4922 scope.go:117] "RemoveContainer" containerID="b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.568969 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-77m65" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.602892 4922 scope.go:117] "RemoveContainer" containerID="2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.605552 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-77m65"] Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.615657 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-77m65"] Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.631014 4922 scope.go:117] "RemoveContainer" containerID="d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.685779 4922 scope.go:117] "RemoveContainer" containerID="b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a" Sep 29 23:22:14 crc kubenswrapper[4922]: E0929 23:22:14.686779 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a\": container with ID starting with b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a not found: ID does not exist" containerID="b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.686840 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a"} err="failed to get container status \"b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a\": rpc error: code = NotFound desc = could not find container \"b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a\": container with ID starting with b48f386cf4baa80ea4a7c2226346346f052fee98f9e7a541285ac47ade47784a not found: ID does not exist" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.686886 4922 scope.go:117] "RemoveContainer" containerID="2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a" Sep 29 23:22:14 crc kubenswrapper[4922]: E0929 23:22:14.687427 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a\": container with ID starting with 2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a not found: ID does not exist" containerID="2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.687492 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a"} err="failed to get container status \"2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a\": rpc error: code = NotFound desc = could not find container \"2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a\": container with ID starting with 2fcd5fa58bbb1bc7549d1dd15643740ccd2c393d6408aa3a73f021667bebd40a not found: ID does not exist" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.687535 4922 scope.go:117] "RemoveContainer" containerID="d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4" Sep 29 23:22:14 crc kubenswrapper[4922]: E0929 23:22:14.688312 4922 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4\": container with ID starting with d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4 not found: ID does not exist" containerID="d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4" Sep 29 23:22:14 crc kubenswrapper[4922]: I0929 23:22:14.688362 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4"} err="failed to get container status \"d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4\": rpc error: code = NotFound desc = could not find container \"d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4\": container with ID starting with d207709d7821e4430877cb7d3981eccf02a916fb272e1247b16350becaabebe4 not found: ID does not exist" Sep 29 23:22:16 crc kubenswrapper[4922]: I0929 23:22:16.437613 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f886b656-439a-4871-b2aa-f3af5439c631" path="/var/lib/kubelet/pods/f886b656-439a-4871-b2aa-f3af5439c631/volumes" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.048528 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cjmhg"] Sep 29 23:22:21 crc kubenswrapper[4922]: E0929 23:22:21.049108 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="extract-content" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.049122 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="extract-content" Sep 29 23:22:21 crc kubenswrapper[4922]: E0929 23:22:21.049146 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="extract-utilities" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.049154 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="extract-utilities" Sep 29 23:22:21 crc kubenswrapper[4922]: E0929 23:22:21.049172 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="registry-server" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.049181 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="registry-server" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.049354 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f886b656-439a-4871-b2aa-f3af5439c631" containerName="registry-server" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.050582 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.078708 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjmhg"] Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.179891 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvpxm\" (UniqueName: \"kubernetes.io/projected/5b954263-a1b9-4408-92fd-356e46202600-kube-api-access-gvpxm\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.180156 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-utilities\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.180247 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-catalog-content\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.282160 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-catalog-content\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.282279 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvpxm\" (UniqueName: \"kubernetes.io/projected/5b954263-a1b9-4408-92fd-356e46202600-kube-api-access-gvpxm\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.282376 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-utilities\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.283095 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-utilities\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.283119 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-catalog-content\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.307032 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gvpxm\" (UniqueName: \"kubernetes.io/projected/5b954263-a1b9-4408-92fd-356e46202600-kube-api-access-gvpxm\") pod \"redhat-operators-cjmhg\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.374337 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:21 crc kubenswrapper[4922]: I0929 23:22:21.821680 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjmhg"] Sep 29 23:22:22 crc kubenswrapper[4922]: I0929 23:22:22.641209 4922 generic.go:334] "Generic (PLEG): container finished" podID="5b954263-a1b9-4408-92fd-356e46202600" containerID="04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d" exitCode=0 Sep 29 23:22:22 crc kubenswrapper[4922]: I0929 23:22:22.641256 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjmhg" event={"ID":"5b954263-a1b9-4408-92fd-356e46202600","Type":"ContainerDied","Data":"04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d"} Sep 29 23:22:22 crc kubenswrapper[4922]: I0929 23:22:22.641287 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjmhg" event={"ID":"5b954263-a1b9-4408-92fd-356e46202600","Type":"ContainerStarted","Data":"fad6ae32d65f89441d4a1b3f0ca8f465ce4c08c09b27b255d7c18d068d73a58c"} Sep 29 23:22:23 crc kubenswrapper[4922]: I0929 23:22:23.422323 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:22:23 crc kubenswrapper[4922]: E0929 23:22:23.422859 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:22:24 crc kubenswrapper[4922]: I0929 23:22:24.662828 4922 generic.go:334] "Generic (PLEG): container finished" podID="5b954263-a1b9-4408-92fd-356e46202600" containerID="5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba" exitCode=0 Sep 29 23:22:24 crc kubenswrapper[4922]: I0929 23:22:24.663068 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjmhg" event={"ID":"5b954263-a1b9-4408-92fd-356e46202600","Type":"ContainerDied","Data":"5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba"} Sep 29 23:22:25 crc kubenswrapper[4922]: I0929 23:22:25.674982 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjmhg" event={"ID":"5b954263-a1b9-4408-92fd-356e46202600","Type":"ContainerStarted","Data":"2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532"} Sep 29 23:22:25 crc kubenswrapper[4922]: I0929 23:22:25.710310 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cjmhg" podStartSLOduration=2.306292688 podStartE2EDuration="4.710284598s" podCreationTimestamp="2025-09-29 23:22:21 +0000 UTC" firstStartedPulling="2025-09-29 23:22:22.643001842 +0000 UTC m=+3346.953290655" lastFinishedPulling="2025-09-29 23:22:25.046993712 +0000 UTC m=+3349.357282565" 
observedRunningTime="2025-09-29 23:22:25.700060331 +0000 UTC m=+3350.010349154" watchObservedRunningTime="2025-09-29 23:22:25.710284598 +0000 UTC m=+3350.020573451" Sep 29 23:22:27 crc kubenswrapper[4922]: I0929 23:22:27.836855 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zv6ck"] Sep 29 23:22:27 crc kubenswrapper[4922]: I0929 23:22:27.839144 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:27 crc kubenswrapper[4922]: I0929 23:22:27.862556 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zv6ck"] Sep 29 23:22:27 crc kubenswrapper[4922]: I0929 23:22:27.912062 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nswjh\" (UniqueName: \"kubernetes.io/projected/380353ba-1abe-4fa8-8c51-7a03c912d7fb-kube-api-access-nswjh\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:27 crc kubenswrapper[4922]: I0929 23:22:27.912614 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-utilities\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:27 crc kubenswrapper[4922]: I0929 23:22:27.912667 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-catalog-content\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.014289 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nswjh\" (UniqueName: \"kubernetes.io/projected/380353ba-1abe-4fa8-8c51-7a03c912d7fb-kube-api-access-nswjh\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.014377 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-utilities\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.014468 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-catalog-content\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.015011 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-utilities\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc 
kubenswrapper[4922]: I0929 23:22:28.015154 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-catalog-content\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.038567 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nswjh\" (UniqueName: \"kubernetes.io/projected/380353ba-1abe-4fa8-8c51-7a03c912d7fb-kube-api-access-nswjh\") pod \"community-operators-zv6ck\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.167913 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.658208 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zv6ck"] Sep 29 23:22:28 crc kubenswrapper[4922]: I0929 23:22:28.699637 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zv6ck" event={"ID":"380353ba-1abe-4fa8-8c51-7a03c912d7fb","Type":"ContainerStarted","Data":"760813dc7d6d1eb99c16ee275ba34db2dc4280c4bb0da5fa356160624fb66095"} Sep 29 23:22:29 crc kubenswrapper[4922]: I0929 23:22:29.712065 4922 generic.go:334] "Generic (PLEG): container finished" podID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerID="6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790" exitCode=0 Sep 29 23:22:29 crc kubenswrapper[4922]: I0929 23:22:29.712118 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zv6ck" event={"ID":"380353ba-1abe-4fa8-8c51-7a03c912d7fb","Type":"ContainerDied","Data":"6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790"} Sep 29 23:22:30 crc kubenswrapper[4922]: I0929 23:22:30.729364 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zv6ck" event={"ID":"380353ba-1abe-4fa8-8c51-7a03c912d7fb","Type":"ContainerStarted","Data":"7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69"} Sep 29 23:22:31 crc kubenswrapper[4922]: I0929 23:22:31.375278 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:31 crc kubenswrapper[4922]: I0929 23:22:31.375931 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:31 crc kubenswrapper[4922]: I0929 23:22:31.451377 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:31 crc kubenswrapper[4922]: I0929 23:22:31.741703 4922 generic.go:334] "Generic (PLEG): container finished" podID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerID="7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69" exitCode=0 Sep 29 23:22:31 crc kubenswrapper[4922]: I0929 23:22:31.741800 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zv6ck" event={"ID":"380353ba-1abe-4fa8-8c51-7a03c912d7fb","Type":"ContainerDied","Data":"7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69"} Sep 29 23:22:31 crc kubenswrapper[4922]: I0929 
23:22:31.823525 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:32 crc kubenswrapper[4922]: I0929 23:22:32.754988 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zv6ck" event={"ID":"380353ba-1abe-4fa8-8c51-7a03c912d7fb","Type":"ContainerStarted","Data":"41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0"} Sep 29 23:22:32 crc kubenswrapper[4922]: I0929 23:22:32.786939 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zv6ck" podStartSLOduration=3.357337683 podStartE2EDuration="5.786910796s" podCreationTimestamp="2025-09-29 23:22:27 +0000 UTC" firstStartedPulling="2025-09-29 23:22:29.714613194 +0000 UTC m=+3354.024902047" lastFinishedPulling="2025-09-29 23:22:32.144186347 +0000 UTC m=+3356.454475160" observedRunningTime="2025-09-29 23:22:32.785325516 +0000 UTC m=+3357.095614379" watchObservedRunningTime="2025-09-29 23:22:32.786910796 +0000 UTC m=+3357.097199639" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.047624 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjmhg"] Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.048867 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cjmhg" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="registry-server" containerID="cri-o://2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532" gracePeriod=2 Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.707181 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.787058 4922 generic.go:334] "Generic (PLEG): container finished" podID="5b954263-a1b9-4408-92fd-356e46202600" containerID="2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532" exitCode=0 Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.787117 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjmhg" event={"ID":"5b954263-a1b9-4408-92fd-356e46202600","Type":"ContainerDied","Data":"2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532"} Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.787144 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cjmhg" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.787170 4922 scope.go:117] "RemoveContainer" containerID="2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.787153 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjmhg" event={"ID":"5b954263-a1b9-4408-92fd-356e46202600","Type":"ContainerDied","Data":"fad6ae32d65f89441d4a1b3f0ca8f465ce4c08c09b27b255d7c18d068d73a58c"} Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.816661 4922 scope.go:117] "RemoveContainer" containerID="5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.847786 4922 scope.go:117] "RemoveContainer" containerID="04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.858540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvpxm\" (UniqueName: \"kubernetes.io/projected/5b954263-a1b9-4408-92fd-356e46202600-kube-api-access-gvpxm\") pod \"5b954263-a1b9-4408-92fd-356e46202600\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.858679 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-catalog-content\") pod \"5b954263-a1b9-4408-92fd-356e46202600\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.858951 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-utilities\") pod \"5b954263-a1b9-4408-92fd-356e46202600\" (UID: \"5b954263-a1b9-4408-92fd-356e46202600\") " Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.860432 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-utilities" (OuterVolumeSpecName: "utilities") pod "5b954263-a1b9-4408-92fd-356e46202600" (UID: "5b954263-a1b9-4408-92fd-356e46202600"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.869923 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b954263-a1b9-4408-92fd-356e46202600-kube-api-access-gvpxm" (OuterVolumeSpecName: "kube-api-access-gvpxm") pod "5b954263-a1b9-4408-92fd-356e46202600" (UID: "5b954263-a1b9-4408-92fd-356e46202600"). InnerVolumeSpecName "kube-api-access-gvpxm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.919272 4922 scope.go:117] "RemoveContainer" containerID="2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532" Sep 29 23:22:35 crc kubenswrapper[4922]: E0929 23:22:35.920375 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532\": container with ID starting with 2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532 not found: ID does not exist" containerID="2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.920446 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532"} err="failed to get container status \"2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532\": rpc error: code = NotFound desc = could not find container \"2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532\": container with ID starting with 2b9f43cec5e3b07d942317563bb1af32ae157e1e30ca65d859a8b0e9682ae532 not found: ID does not exist" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.920478 4922 scope.go:117] "RemoveContainer" containerID="5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba" Sep 29 23:22:35 crc kubenswrapper[4922]: E0929 23:22:35.921120 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba\": container with ID starting with 5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba not found: ID does not exist" containerID="5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.921178 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba"} err="failed to get container status \"5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba\": rpc error: code = NotFound desc = could not find container \"5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba\": container with ID starting with 5cd59ae86cfde6d08a528bb4101627aa2abad7d65c1f45aed945ba76f6108cba not found: ID does not exist" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.921217 4922 scope.go:117] "RemoveContainer" containerID="04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d" Sep 29 23:22:35 crc kubenswrapper[4922]: E0929 23:22:35.921766 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d\": container with ID starting with 04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d not found: ID does not exist" containerID="04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.921800 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d"} err="failed to get container status \"04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d\": rpc error: code = NotFound desc = could not 
find container \"04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d\": container with ID starting with 04a4fbc3e3b9f407297df0045510aa5504676a39cd17d6e3ed57a3e83c2d594d not found: ID does not exist" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.960473 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:35 crc kubenswrapper[4922]: I0929 23:22:35.960512 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvpxm\" (UniqueName: \"kubernetes.io/projected/5b954263-a1b9-4408-92fd-356e46202600-kube-api-access-gvpxm\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:36 crc kubenswrapper[4922]: I0929 23:22:36.010023 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b954263-a1b9-4408-92fd-356e46202600" (UID: "5b954263-a1b9-4408-92fd-356e46202600"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:22:36 crc kubenswrapper[4922]: I0929 23:22:36.061476 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b954263-a1b9-4408-92fd-356e46202600-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:36 crc kubenswrapper[4922]: I0929 23:22:36.128423 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjmhg"] Sep 29 23:22:36 crc kubenswrapper[4922]: I0929 23:22:36.134146 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cjmhg"] Sep 29 23:22:36 crc kubenswrapper[4922]: I0929 23:22:36.441905 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b954263-a1b9-4408-92fd-356e46202600" path="/var/lib/kubelet/pods/5b954263-a1b9-4408-92fd-356e46202600/volumes" Sep 29 23:22:37 crc kubenswrapper[4922]: I0929 23:22:37.421902 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:22:37 crc kubenswrapper[4922]: E0929 23:22:37.422614 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:22:38 crc kubenswrapper[4922]: I0929 23:22:38.168571 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:38 crc kubenswrapper[4922]: I0929 23:22:38.168636 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:38 crc kubenswrapper[4922]: I0929 23:22:38.247800 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:38 crc kubenswrapper[4922]: I0929 23:22:38.889737 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:39 crc kubenswrapper[4922]: I0929 23:22:39.231426 
4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zv6ck"] Sep 29 23:22:40 crc kubenswrapper[4922]: I0929 23:22:40.835143 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zv6ck" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="registry-server" containerID="cri-o://41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0" gracePeriod=2 Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.310147 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.450341 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-catalog-content\") pod \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.450517 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nswjh\" (UniqueName: \"kubernetes.io/projected/380353ba-1abe-4fa8-8c51-7a03c912d7fb-kube-api-access-nswjh\") pod \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.451618 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-utilities\") pod \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\" (UID: \"380353ba-1abe-4fa8-8c51-7a03c912d7fb\") " Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.452083 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-utilities" (OuterVolumeSpecName: "utilities") pod "380353ba-1abe-4fa8-8c51-7a03c912d7fb" (UID: "380353ba-1abe-4fa8-8c51-7a03c912d7fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.452531 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.458303 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/380353ba-1abe-4fa8-8c51-7a03c912d7fb-kube-api-access-nswjh" (OuterVolumeSpecName: "kube-api-access-nswjh") pod "380353ba-1abe-4fa8-8c51-7a03c912d7fb" (UID: "380353ba-1abe-4fa8-8c51-7a03c912d7fb"). InnerVolumeSpecName "kube-api-access-nswjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.536650 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "380353ba-1abe-4fa8-8c51-7a03c912d7fb" (UID: "380353ba-1abe-4fa8-8c51-7a03c912d7fb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.553808 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/380353ba-1abe-4fa8-8c51-7a03c912d7fb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.553861 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nswjh\" (UniqueName: \"kubernetes.io/projected/380353ba-1abe-4fa8-8c51-7a03c912d7fb-kube-api-access-nswjh\") on node \"crc\" DevicePath \"\"" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.849647 4922 generic.go:334] "Generic (PLEG): container finished" podID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerID="41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0" exitCode=0 Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.849724 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zv6ck" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.849721 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zv6ck" event={"ID":"380353ba-1abe-4fa8-8c51-7a03c912d7fb","Type":"ContainerDied","Data":"41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0"} Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.849895 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zv6ck" event={"ID":"380353ba-1abe-4fa8-8c51-7a03c912d7fb","Type":"ContainerDied","Data":"760813dc7d6d1eb99c16ee275ba34db2dc4280c4bb0da5fa356160624fb66095"} Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.849928 4922 scope.go:117] "RemoveContainer" containerID="41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.885816 4922 scope.go:117] "RemoveContainer" containerID="7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.913775 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zv6ck"] Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.923199 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zv6ck"] Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.932945 4922 scope.go:117] "RemoveContainer" containerID="6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.969831 4922 scope.go:117] "RemoveContainer" containerID="41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0" Sep 29 23:22:41 crc kubenswrapper[4922]: E0929 23:22:41.970679 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0\": container with ID starting with 41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0 not found: ID does not exist" containerID="41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.970722 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0"} err="failed to get container status 
\"41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0\": rpc error: code = NotFound desc = could not find container \"41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0\": container with ID starting with 41d00711004bacda536ba57b88b384e550097624336a0dae2ac566d71e2d59f0 not found: ID does not exist" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.970752 4922 scope.go:117] "RemoveContainer" containerID="7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69" Sep 29 23:22:41 crc kubenswrapper[4922]: E0929 23:22:41.971688 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69\": container with ID starting with 7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69 not found: ID does not exist" containerID="7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.971768 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69"} err="failed to get container status \"7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69\": rpc error: code = NotFound desc = could not find container \"7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69\": container with ID starting with 7da53775f60220feedf57e491e499dd6b8690bc94abaa37d2571d95bdbb99c69 not found: ID does not exist" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.971834 4922 scope.go:117] "RemoveContainer" containerID="6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790" Sep 29 23:22:41 crc kubenswrapper[4922]: E0929 23:22:41.972665 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790\": container with ID starting with 6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790 not found: ID does not exist" containerID="6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790" Sep 29 23:22:41 crc kubenswrapper[4922]: I0929 23:22:41.972758 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790"} err="failed to get container status \"6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790\": rpc error: code = NotFound desc = could not find container \"6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790\": container with ID starting with 6bc65b509638358429386d6a70b292c4cfb0124410a21ede2689f45be1e10790 not found: ID does not exist" Sep 29 23:22:42 crc kubenswrapper[4922]: I0929 23:22:42.461373 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" path="/var/lib/kubelet/pods/380353ba-1abe-4fa8-8c51-7a03c912d7fb/volumes" Sep 29 23:22:51 crc kubenswrapper[4922]: I0929 23:22:51.421835 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:22:51 crc kubenswrapper[4922]: E0929 23:22:51.422889 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:23:02 crc kubenswrapper[4922]: I0929 23:23:02.421989 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:23:02 crc kubenswrapper[4922]: E0929 23:23:02.423285 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:23:14 crc kubenswrapper[4922]: I0929 23:23:14.423761 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:23:14 crc kubenswrapper[4922]: E0929 23:23:14.425062 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:23:27 crc kubenswrapper[4922]: I0929 23:23:27.421918 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:23:27 crc kubenswrapper[4922]: E0929 23:23:27.422752 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:23:42 crc kubenswrapper[4922]: I0929 23:23:42.421873 4922 scope.go:117] "RemoveContainer" containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:23:43 crc kubenswrapper[4922]: I0929 23:23:43.429634 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"c2e15de6268620f4f5e4e99298e01ab126b31cc6c65d6e142fcb55c9efa32b79"} Sep 29 23:25:58 crc kubenswrapper[4922]: I0929 23:25:58.912553 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:25:58 crc kubenswrapper[4922]: I0929 23:25:58.913133 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:26:28 crc kubenswrapper[4922]: I0929 
23:26:28.913026 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:26:28 crc kubenswrapper[4922]: I0929 23:26:28.913873 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.086781 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c6vfn"] Sep 29 23:26:48 crc kubenswrapper[4922]: E0929 23:26:48.087689 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="extract-utilities" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.087710 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="extract-utilities" Sep 29 23:26:48 crc kubenswrapper[4922]: E0929 23:26:48.087756 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="extract-content" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.087770 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="extract-content" Sep 29 23:26:48 crc kubenswrapper[4922]: E0929 23:26:48.087800 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="extract-utilities" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.087815 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="extract-utilities" Sep 29 23:26:48 crc kubenswrapper[4922]: E0929 23:26:48.087837 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="extract-content" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.087850 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="extract-content" Sep 29 23:26:48 crc kubenswrapper[4922]: E0929 23:26:48.087874 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="registry-server" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.087886 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="registry-server" Sep 29 23:26:48 crc kubenswrapper[4922]: E0929 23:26:48.087916 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="registry-server" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.087929 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="registry-server" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.088193 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="380353ba-1abe-4fa8-8c51-7a03c912d7fb" containerName="registry-server" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.088239 4922 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="5b954263-a1b9-4408-92fd-356e46202600" containerName="registry-server" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.090102 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.147073 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6vfn"] Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.224562 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-catalog-content\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.224610 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-utilities\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.224729 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-947h6\" (UniqueName: \"kubernetes.io/projected/c9886d43-a7ad-4546-896e-1e8fa13200d8-kube-api-access-947h6\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.325620 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-947h6\" (UniqueName: \"kubernetes.io/projected/c9886d43-a7ad-4546-896e-1e8fa13200d8-kube-api-access-947h6\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.325710 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-catalog-content\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.325750 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-utilities\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.326417 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-utilities\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.326531 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-catalog-content\") pod 
\"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.355884 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-947h6\" (UniqueName: \"kubernetes.io/projected/c9886d43-a7ad-4546-896e-1e8fa13200d8-kube-api-access-947h6\") pod \"certified-operators-c6vfn\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.423686 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:48 crc kubenswrapper[4922]: I0929 23:26:48.901273 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c6vfn"] Sep 29 23:26:49 crc kubenswrapper[4922]: I0929 23:26:49.197201 4922 generic.go:334] "Generic (PLEG): container finished" podID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerID="ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352" exitCode=0 Sep 29 23:26:49 crc kubenswrapper[4922]: I0929 23:26:49.197262 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6vfn" event={"ID":"c9886d43-a7ad-4546-896e-1e8fa13200d8","Type":"ContainerDied","Data":"ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352"} Sep 29 23:26:49 crc kubenswrapper[4922]: I0929 23:26:49.197300 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6vfn" event={"ID":"c9886d43-a7ad-4546-896e-1e8fa13200d8","Type":"ContainerStarted","Data":"43cfd95963cc0a507d5f23326001616c2eacb4ecc70b538c8738a155573ab3f4"} Sep 29 23:26:50 crc kubenswrapper[4922]: I0929 23:26:50.207864 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6vfn" event={"ID":"c9886d43-a7ad-4546-896e-1e8fa13200d8","Type":"ContainerStarted","Data":"9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6"} Sep 29 23:26:51 crc kubenswrapper[4922]: I0929 23:26:51.223103 4922 generic.go:334] "Generic (PLEG): container finished" podID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerID="9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6" exitCode=0 Sep 29 23:26:51 crc kubenswrapper[4922]: I0929 23:26:51.223146 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6vfn" event={"ID":"c9886d43-a7ad-4546-896e-1e8fa13200d8","Type":"ContainerDied","Data":"9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6"} Sep 29 23:26:52 crc kubenswrapper[4922]: I0929 23:26:52.232756 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6vfn" event={"ID":"c9886d43-a7ad-4546-896e-1e8fa13200d8","Type":"ContainerStarted","Data":"2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66"} Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.433722 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.434311 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.474588 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.510057 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c6vfn" podStartSLOduration=8.071451821 podStartE2EDuration="10.51004098s" podCreationTimestamp="2025-09-29 23:26:48 +0000 UTC" firstStartedPulling="2025-09-29 23:26:49.202859545 +0000 UTC m=+3613.513148398" lastFinishedPulling="2025-09-29 23:26:51.641448734 +0000 UTC m=+3615.951737557" observedRunningTime="2025-09-29 23:26:52.257581071 +0000 UTC m=+3616.567869924" watchObservedRunningTime="2025-09-29 23:26:58.51004098 +0000 UTC m=+3622.820329793" Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.913321 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.913444 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.913504 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.914195 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c2e15de6268620f4f5e4e99298e01ab126b31cc6c65d6e142fcb55c9efa32b79"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:26:58 crc kubenswrapper[4922]: I0929 23:26:58.914280 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://c2e15de6268620f4f5e4e99298e01ab126b31cc6c65d6e142fcb55c9efa32b79" gracePeriod=600 Sep 29 23:26:59 crc kubenswrapper[4922]: I0929 23:26:59.296668 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="c2e15de6268620f4f5e4e99298e01ab126b31cc6c65d6e142fcb55c9efa32b79" exitCode=0 Sep 29 23:26:59 crc kubenswrapper[4922]: I0929 23:26:59.296720 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"c2e15de6268620f4f5e4e99298e01ab126b31cc6c65d6e142fcb55c9efa32b79"} Sep 29 23:26:59 crc kubenswrapper[4922]: I0929 23:26:59.297811 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e"} Sep 29 23:26:59 crc kubenswrapper[4922]: I0929 23:26:59.297832 4922 scope.go:117] "RemoveContainer" 
containerID="e0123c570fac352aa83374dbc69b7418fd60b406ebe5e70009a23d4cd6517b26" Sep 29 23:26:59 crc kubenswrapper[4922]: I0929 23:26:59.379796 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:26:59 crc kubenswrapper[4922]: I0929 23:26:59.435635 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6vfn"] Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.319588 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c6vfn" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="registry-server" containerID="cri-o://2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66" gracePeriod=2 Sep 29 23:27:01 crc kubenswrapper[4922]: E0929 23:27:01.569537 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9886d43_a7ad_4546_896e_1e8fa13200d8.slice/crio-conmon-2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66.scope\": RecentStats: unable to find data in memory cache]" Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.770556 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.839030 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-utilities\") pod \"c9886d43-a7ad-4546-896e-1e8fa13200d8\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.839148 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-catalog-content\") pod \"c9886d43-a7ad-4546-896e-1e8fa13200d8\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.839298 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-947h6\" (UniqueName: \"kubernetes.io/projected/c9886d43-a7ad-4546-896e-1e8fa13200d8-kube-api-access-947h6\") pod \"c9886d43-a7ad-4546-896e-1e8fa13200d8\" (UID: \"c9886d43-a7ad-4546-896e-1e8fa13200d8\") " Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.840958 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-utilities" (OuterVolumeSpecName: "utilities") pod "c9886d43-a7ad-4546-896e-1e8fa13200d8" (UID: "c9886d43-a7ad-4546-896e-1e8fa13200d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.846937 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9886d43-a7ad-4546-896e-1e8fa13200d8-kube-api-access-947h6" (OuterVolumeSpecName: "kube-api-access-947h6") pod "c9886d43-a7ad-4546-896e-1e8fa13200d8" (UID: "c9886d43-a7ad-4546-896e-1e8fa13200d8"). InnerVolumeSpecName "kube-api-access-947h6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.904005 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c9886d43-a7ad-4546-896e-1e8fa13200d8" (UID: "c9886d43-a7ad-4546-896e-1e8fa13200d8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.940836 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.940889 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-947h6\" (UniqueName: \"kubernetes.io/projected/c9886d43-a7ad-4546-896e-1e8fa13200d8-kube-api-access-947h6\") on node \"crc\" DevicePath \"\"" Sep 29 23:27:01 crc kubenswrapper[4922]: I0929 23:27:01.940901 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9886d43-a7ad-4546-896e-1e8fa13200d8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.330970 4922 generic.go:334] "Generic (PLEG): container finished" podID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerID="2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66" exitCode=0 Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.331019 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6vfn" event={"ID":"c9886d43-a7ad-4546-896e-1e8fa13200d8","Type":"ContainerDied","Data":"2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66"} Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.331052 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c6vfn" event={"ID":"c9886d43-a7ad-4546-896e-1e8fa13200d8","Type":"ContainerDied","Data":"43cfd95963cc0a507d5f23326001616c2eacb4ecc70b538c8738a155573ab3f4"} Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.331059 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c6vfn" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.331070 4922 scope.go:117] "RemoveContainer" containerID="2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.358964 4922 scope.go:117] "RemoveContainer" containerID="9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.379880 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c6vfn"] Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.391736 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c6vfn"] Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.403517 4922 scope.go:117] "RemoveContainer" containerID="ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.438338 4922 scope.go:117] "RemoveContainer" containerID="2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66" Sep 29 23:27:02 crc kubenswrapper[4922]: E0929 23:27:02.439001 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66\": container with ID starting with 2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66 not found: ID does not exist" containerID="2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.439258 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66"} err="failed to get container status \"2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66\": rpc error: code = NotFound desc = could not find container \"2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66\": container with ID starting with 2ffab9a7be2b4f75303d4b832cd868270f04b7f0de4eeb8952b6aaa7bd625d66 not found: ID does not exist" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.439560 4922 scope.go:117] "RemoveContainer" containerID="9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6" Sep 29 23:27:02 crc kubenswrapper[4922]: E0929 23:27:02.440166 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6\": container with ID starting with 9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6 not found: ID does not exist" containerID="9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.440227 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6"} err="failed to get container status \"9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6\": rpc error: code = NotFound desc = could not find container \"9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6\": container with ID starting with 9619f73248f106710e5b3de0d3a21dd9c6ebe68e839e0fbe1e6887cf98178bf6 not found: ID does not exist" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.440268 4922 scope.go:117] "RemoveContainer" 
containerID="ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.440306 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" path="/var/lib/kubelet/pods/c9886d43-a7ad-4546-896e-1e8fa13200d8/volumes" Sep 29 23:27:02 crc kubenswrapper[4922]: E0929 23:27:02.440787 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352\": container with ID starting with ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352 not found: ID does not exist" containerID="ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352" Sep 29 23:27:02 crc kubenswrapper[4922]: I0929 23:27:02.440843 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352"} err="failed to get container status \"ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352\": rpc error: code = NotFound desc = could not find container \"ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352\": container with ID starting with ceb9fb0abe413525892d29cc935dce281b705ffe89aca9947952aa6595c36352 not found: ID does not exist" Sep 29 23:29:28 crc kubenswrapper[4922]: I0929 23:29:28.913171 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:29:28 crc kubenswrapper[4922]: I0929 23:29:28.913863 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:29:58 crc kubenswrapper[4922]: I0929 23:29:58.913036 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:29:58 crc kubenswrapper[4922]: I0929 23:29:58.913722 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.168998 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s"] Sep 29 23:30:00 crc kubenswrapper[4922]: E0929 23:30:00.169866 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="extract-utilities" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.169888 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="extract-utilities" Sep 29 23:30:00 crc kubenswrapper[4922]: E0929 23:30:00.169914 4922 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="extract-content" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.169927 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="extract-content" Sep 29 23:30:00 crc kubenswrapper[4922]: E0929 23:30:00.169953 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="registry-server" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.169967 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="registry-server" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.170240 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9886d43-a7ad-4546-896e-1e8fa13200d8" containerName="registry-server" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.171097 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.177028 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.177347 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.183902 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s"] Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.290625 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1574c25c-5b6e-4b42-97b4-b37e85b535c0-config-volume\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.290732 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6vzn\" (UniqueName: \"kubernetes.io/projected/1574c25c-5b6e-4b42-97b4-b37e85b535c0-kube-api-access-m6vzn\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.291222 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1574c25c-5b6e-4b42-97b4-b37e85b535c0-secret-volume\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.393136 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1574c25c-5b6e-4b42-97b4-b37e85b535c0-config-volume\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.393301 
4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6vzn\" (UniqueName: \"kubernetes.io/projected/1574c25c-5b6e-4b42-97b4-b37e85b535c0-kube-api-access-m6vzn\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.393467 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1574c25c-5b6e-4b42-97b4-b37e85b535c0-secret-volume\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.395042 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1574c25c-5b6e-4b42-97b4-b37e85b535c0-config-volume\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.409107 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1574c25c-5b6e-4b42-97b4-b37e85b535c0-secret-volume\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.426076 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6vzn\" (UniqueName: \"kubernetes.io/projected/1574c25c-5b6e-4b42-97b4-b37e85b535c0-kube-api-access-m6vzn\") pod \"collect-profiles-29319810-5r88s\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.503713 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:00 crc kubenswrapper[4922]: I0929 23:30:00.952217 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s"] Sep 29 23:30:01 crc kubenswrapper[4922]: I0929 23:30:01.008996 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" event={"ID":"1574c25c-5b6e-4b42-97b4-b37e85b535c0","Type":"ContainerStarted","Data":"c5a9400df7217ea324398cd5772f3cefed90eaf84895298aa7257abc4169427d"} Sep 29 23:30:02 crc kubenswrapper[4922]: I0929 23:30:02.019724 4922 generic.go:334] "Generic (PLEG): container finished" podID="1574c25c-5b6e-4b42-97b4-b37e85b535c0" containerID="4638d6f25be9c6e0b973b24298661a25674cbfc556658026f7ef1f1d0726b7d2" exitCode=0 Sep 29 23:30:02 crc kubenswrapper[4922]: I0929 23:30:02.019802 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" event={"ID":"1574c25c-5b6e-4b42-97b4-b37e85b535c0","Type":"ContainerDied","Data":"4638d6f25be9c6e0b973b24298661a25674cbfc556658026f7ef1f1d0726b7d2"} Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.373050 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.457129 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6vzn\" (UniqueName: \"kubernetes.io/projected/1574c25c-5b6e-4b42-97b4-b37e85b535c0-kube-api-access-m6vzn\") pod \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.457448 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1574c25c-5b6e-4b42-97b4-b37e85b535c0-secret-volume\") pod \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.457569 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1574c25c-5b6e-4b42-97b4-b37e85b535c0-config-volume\") pod \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\" (UID: \"1574c25c-5b6e-4b42-97b4-b37e85b535c0\") " Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.458957 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1574c25c-5b6e-4b42-97b4-b37e85b535c0-config-volume" (OuterVolumeSpecName: "config-volume") pod "1574c25c-5b6e-4b42-97b4-b37e85b535c0" (UID: "1574c25c-5b6e-4b42-97b4-b37e85b535c0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.464283 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1574c25c-5b6e-4b42-97b4-b37e85b535c0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1574c25c-5b6e-4b42-97b4-b37e85b535c0" (UID: "1574c25c-5b6e-4b42-97b4-b37e85b535c0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.465908 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1574c25c-5b6e-4b42-97b4-b37e85b535c0-kube-api-access-m6vzn" (OuterVolumeSpecName: "kube-api-access-m6vzn") pod "1574c25c-5b6e-4b42-97b4-b37e85b535c0" (UID: "1574c25c-5b6e-4b42-97b4-b37e85b535c0"). InnerVolumeSpecName "kube-api-access-m6vzn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.559366 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1574c25c-5b6e-4b42-97b4-b37e85b535c0-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.559448 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1574c25c-5b6e-4b42-97b4-b37e85b535c0-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:30:03 crc kubenswrapper[4922]: I0929 23:30:03.559472 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6vzn\" (UniqueName: \"kubernetes.io/projected/1574c25c-5b6e-4b42-97b4-b37e85b535c0-kube-api-access-m6vzn\") on node \"crc\" DevicePath \"\"" Sep 29 23:30:04 crc kubenswrapper[4922]: I0929 23:30:04.039234 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" event={"ID":"1574c25c-5b6e-4b42-97b4-b37e85b535c0","Type":"ContainerDied","Data":"c5a9400df7217ea324398cd5772f3cefed90eaf84895298aa7257abc4169427d"} Sep 29 23:30:04 crc kubenswrapper[4922]: I0929 23:30:04.039296 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5a9400df7217ea324398cd5772f3cefed90eaf84895298aa7257abc4169427d" Sep 29 23:30:04 crc kubenswrapper[4922]: I0929 23:30:04.039381 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s" Sep 29 23:30:04 crc kubenswrapper[4922]: I0929 23:30:04.452250 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4"] Sep 29 23:30:04 crc kubenswrapper[4922]: I0929 23:30:04.460855 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319765-fz9h4"] Sep 29 23:30:06 crc kubenswrapper[4922]: I0929 23:30:06.432326 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b5656b9-b6f8-4707-b988-b3bbc24986b2" path="/var/lib/kubelet/pods/4b5656b9-b6f8-4707-b988-b3bbc24986b2/volumes" Sep 29 23:30:28 crc kubenswrapper[4922]: I0929 23:30:28.913298 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:30:28 crc kubenswrapper[4922]: I0929 23:30:28.914419 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:30:28 crc kubenswrapper[4922]: I0929 23:30:28.914495 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:30:28 crc kubenswrapper[4922]: I0929 23:30:28.915499 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e"} 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:30:28 crc kubenswrapper[4922]: I0929 23:30:28.915567 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" gracePeriod=600 Sep 29 23:30:29 crc kubenswrapper[4922]: E0929 23:30:29.052442 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:30:29 crc kubenswrapper[4922]: I0929 23:30:29.313499 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" exitCode=0 Sep 29 23:30:29 crc kubenswrapper[4922]: I0929 23:30:29.313556 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e"} Sep 29 23:30:29 crc kubenswrapper[4922]: I0929 23:30:29.314044 4922 scope.go:117] "RemoveContainer" containerID="c2e15de6268620f4f5e4e99298e01ab126b31cc6c65d6e142fcb55c9efa32b79" Sep 29 23:30:29 crc kubenswrapper[4922]: I0929 23:30:29.314701 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:30:29 crc kubenswrapper[4922]: E0929 23:30:29.315015 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:30:43 crc kubenswrapper[4922]: I0929 23:30:43.422239 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:30:43 crc kubenswrapper[4922]: E0929 23:30:43.423481 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:30:51 crc kubenswrapper[4922]: I0929 23:30:51.139440 4922 scope.go:117] "RemoveContainer" containerID="da8ea04b3bd8c2168e8dfc1a97d02fc2c7375bcacfba3760942e140bef2d41dc" Sep 29 23:30:55 crc kubenswrapper[4922]: I0929 23:30:55.422758 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:30:55 crc 
kubenswrapper[4922]: E0929 23:30:55.423561 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:31:08 crc kubenswrapper[4922]: I0929 23:31:08.422159 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:31:08 crc kubenswrapper[4922]: E0929 23:31:08.423227 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:31:23 crc kubenswrapper[4922]: I0929 23:31:23.424248 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:31:23 crc kubenswrapper[4922]: E0929 23:31:23.425653 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:31:36 crc kubenswrapper[4922]: I0929 23:31:36.431972 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:31:36 crc kubenswrapper[4922]: E0929 23:31:36.432964 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:31:48 crc kubenswrapper[4922]: I0929 23:31:48.422506 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:31:48 crc kubenswrapper[4922]: E0929 23:31:48.425887 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:31:59 crc kubenswrapper[4922]: I0929 23:31:59.422644 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:31:59 crc kubenswrapper[4922]: E0929 23:31:59.423669 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:32:12 crc kubenswrapper[4922]: I0929 23:32:12.421841 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:32:12 crc kubenswrapper[4922]: E0929 23:32:12.423002 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:32:27 crc kubenswrapper[4922]: I0929 23:32:27.422042 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:32:27 crc kubenswrapper[4922]: E0929 23:32:27.423111 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:32:39 crc kubenswrapper[4922]: I0929 23:32:39.422024 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:32:39 crc kubenswrapper[4922]: E0929 23:32:39.422824 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:32:51 crc kubenswrapper[4922]: I0929 23:32:51.421782 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:32:51 crc kubenswrapper[4922]: E0929 23:32:51.422968 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.796522 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mlb57"] Sep 29 23:33:03 crc kubenswrapper[4922]: E0929 23:33:03.797717 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1574c25c-5b6e-4b42-97b4-b37e85b535c0" containerName="collect-profiles" Sep 29 23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.797734 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1574c25c-5b6e-4b42-97b4-b37e85b535c0" containerName="collect-profiles" Sep 29 
23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.798115 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1574c25c-5b6e-4b42-97b4-b37e85b535c0" containerName="collect-profiles" Sep 29 23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.800452 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.826251 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mlb57"] Sep 29 23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.939068 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9j8m\" (UniqueName: \"kubernetes.io/projected/4beaf6a9-f0f7-4394-881c-d329e31ed69e-kube-api-access-z9j8m\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.939393 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-catalog-content\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:03 crc kubenswrapper[4922]: I0929 23:33:03.939507 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-utilities\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.041076 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-utilities\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.041151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9j8m\" (UniqueName: \"kubernetes.io/projected/4beaf6a9-f0f7-4394-881c-d329e31ed69e-kube-api-access-z9j8m\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.041182 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-catalog-content\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.041672 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-utilities\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.041710 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-catalog-content\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.058475 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9j8m\" (UniqueName: \"kubernetes.io/projected/4beaf6a9-f0f7-4394-881c-d329e31ed69e-kube-api-access-z9j8m\") pod \"redhat-operators-mlb57\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.139581 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.551894 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mlb57"] Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.830779 4922 generic.go:334] "Generic (PLEG): container finished" podID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerID="ea7b72ab169503188590f4d19af0d624106a1a29153aebcf2d0a4add60bcdbb1" exitCode=0 Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.830829 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlb57" event={"ID":"4beaf6a9-f0f7-4394-881c-d329e31ed69e","Type":"ContainerDied","Data":"ea7b72ab169503188590f4d19af0d624106a1a29153aebcf2d0a4add60bcdbb1"} Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.830872 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlb57" event={"ID":"4beaf6a9-f0f7-4394-881c-d329e31ed69e","Type":"ContainerStarted","Data":"fbcb74859042b56e3ac3600f0019da0aba27792748a231a20171bd03959f85e1"} Sep 29 23:33:04 crc kubenswrapper[4922]: I0929 23:33:04.832308 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 23:33:05 crc kubenswrapper[4922]: I0929 23:33:05.422473 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:33:05 crc kubenswrapper[4922]: E0929 23:33:05.422997 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:33:06 crc kubenswrapper[4922]: I0929 23:33:06.850758 4922 generic.go:334] "Generic (PLEG): container finished" podID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerID="b5f0bf72961c64a02a2563df79f8de2ae4bd78da31951cf6d1a13354cbdc3e15" exitCode=0 Sep 29 23:33:06 crc kubenswrapper[4922]: I0929 23:33:06.850819 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlb57" event={"ID":"4beaf6a9-f0f7-4394-881c-d329e31ed69e","Type":"ContainerDied","Data":"b5f0bf72961c64a02a2563df79f8de2ae4bd78da31951cf6d1a13354cbdc3e15"} Sep 29 23:33:07 crc kubenswrapper[4922]: I0929 23:33:07.860499 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlb57" 
event={"ID":"4beaf6a9-f0f7-4394-881c-d329e31ed69e","Type":"ContainerStarted","Data":"25c62ae381f47c91364c79f15a1746c9ddaab22fd03484bca60fa2e7fe4d1087"} Sep 29 23:33:07 crc kubenswrapper[4922]: I0929 23:33:07.880704 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mlb57" podStartSLOduration=2.339582398 podStartE2EDuration="4.880687885s" podCreationTimestamp="2025-09-29 23:33:03 +0000 UTC" firstStartedPulling="2025-09-29 23:33:04.832093413 +0000 UTC m=+3989.142382216" lastFinishedPulling="2025-09-29 23:33:07.37319885 +0000 UTC m=+3991.683487703" observedRunningTime="2025-09-29 23:33:07.875542407 +0000 UTC m=+3992.185831240" watchObservedRunningTime="2025-09-29 23:33:07.880687885 +0000 UTC m=+3992.190976698" Sep 29 23:33:14 crc kubenswrapper[4922]: I0929 23:33:14.141226 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:14 crc kubenswrapper[4922]: I0929 23:33:14.141898 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:14 crc kubenswrapper[4922]: I0929 23:33:14.223669 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:14 crc kubenswrapper[4922]: I0929 23:33:14.994271 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:15 crc kubenswrapper[4922]: I0929 23:33:15.050696 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mlb57"] Sep 29 23:33:16 crc kubenswrapper[4922]: I0929 23:33:16.938804 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mlb57" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="registry-server" containerID="cri-o://25c62ae381f47c91364c79f15a1746c9ddaab22fd03484bca60fa2e7fe4d1087" gracePeriod=2 Sep 29 23:33:17 crc kubenswrapper[4922]: I0929 23:33:17.953159 4922 generic.go:334] "Generic (PLEG): container finished" podID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerID="25c62ae381f47c91364c79f15a1746c9ddaab22fd03484bca60fa2e7fe4d1087" exitCode=0 Sep 29 23:33:17 crc kubenswrapper[4922]: I0929 23:33:17.953254 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlb57" event={"ID":"4beaf6a9-f0f7-4394-881c-d329e31ed69e","Type":"ContainerDied","Data":"25c62ae381f47c91364c79f15a1746c9ddaab22fd03484bca60fa2e7fe4d1087"} Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.533639 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.668609 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9j8m\" (UniqueName: \"kubernetes.io/projected/4beaf6a9-f0f7-4394-881c-d329e31ed69e-kube-api-access-z9j8m\") pod \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.668687 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-catalog-content\") pod \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.668823 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-utilities\") pod \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\" (UID: \"4beaf6a9-f0f7-4394-881c-d329e31ed69e\") " Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.670826 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-utilities" (OuterVolumeSpecName: "utilities") pod "4beaf6a9-f0f7-4394-881c-d329e31ed69e" (UID: "4beaf6a9-f0f7-4394-881c-d329e31ed69e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.674571 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4beaf6a9-f0f7-4394-881c-d329e31ed69e-kube-api-access-z9j8m" (OuterVolumeSpecName: "kube-api-access-z9j8m") pod "4beaf6a9-f0f7-4394-881c-d329e31ed69e" (UID: "4beaf6a9-f0f7-4394-881c-d329e31ed69e"). InnerVolumeSpecName "kube-api-access-z9j8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.745421 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4beaf6a9-f0f7-4394-881c-d329e31ed69e" (UID: "4beaf6a9-f0f7-4394-881c-d329e31ed69e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.770540 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.770569 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9j8m\" (UniqueName: \"kubernetes.io/projected/4beaf6a9-f0f7-4394-881c-d329e31ed69e-kube-api-access-z9j8m\") on node \"crc\" DevicePath \"\"" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.770596 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4beaf6a9-f0f7-4394-881c-d329e31ed69e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.967071 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlb57" event={"ID":"4beaf6a9-f0f7-4394-881c-d329e31ed69e","Type":"ContainerDied","Data":"fbcb74859042b56e3ac3600f0019da0aba27792748a231a20171bd03959f85e1"} Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.967153 4922 scope.go:117] "RemoveContainer" containerID="25c62ae381f47c91364c79f15a1746c9ddaab22fd03484bca60fa2e7fe4d1087" Sep 29 23:33:18 crc kubenswrapper[4922]: I0929 23:33:18.967245 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mlb57" Sep 29 23:33:19 crc kubenswrapper[4922]: I0929 23:33:19.000298 4922 scope.go:117] "RemoveContainer" containerID="b5f0bf72961c64a02a2563df79f8de2ae4bd78da31951cf6d1a13354cbdc3e15" Sep 29 23:33:19 crc kubenswrapper[4922]: I0929 23:33:19.025566 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mlb57"] Sep 29 23:33:19 crc kubenswrapper[4922]: I0929 23:33:19.040461 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mlb57"] Sep 29 23:33:19 crc kubenswrapper[4922]: I0929 23:33:19.052003 4922 scope.go:117] "RemoveContainer" containerID="ea7b72ab169503188590f4d19af0d624106a1a29153aebcf2d0a4add60bcdbb1" Sep 29 23:33:20 crc kubenswrapper[4922]: I0929 23:33:20.422483 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:33:20 crc kubenswrapper[4922]: E0929 23:33:20.423185 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:33:20 crc kubenswrapper[4922]: I0929 23:33:20.439753 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" path="/var/lib/kubelet/pods/4beaf6a9-f0f7-4394-881c-d329e31ed69e/volumes" Sep 29 23:33:31 crc kubenswrapper[4922]: I0929 23:33:31.422453 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:33:31 crc kubenswrapper[4922]: E0929 23:33:31.423420 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.010165 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tk9nw"] Sep 29 23:33:42 crc kubenswrapper[4922]: E0929 23:33:42.011696 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="extract-utilities" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.011727 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="extract-utilities" Sep 29 23:33:42 crc kubenswrapper[4922]: E0929 23:33:42.011760 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="extract-content" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.011776 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="extract-content" Sep 29 23:33:42 crc kubenswrapper[4922]: E0929 23:33:42.011845 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="registry-server" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.011858 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="registry-server" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.012142 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4beaf6a9-f0f7-4394-881c-d329e31ed69e" containerName="registry-server" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.013931 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.020342 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tk9nw"] Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.159266 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-catalog-content\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.159378 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jswq\" (UniqueName: \"kubernetes.io/projected/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-kube-api-access-2jswq\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.159505 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-utilities\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.260637 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-catalog-content\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.260795 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jswq\" (UniqueName: \"kubernetes.io/projected/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-kube-api-access-2jswq\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.260891 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-utilities\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.261450 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-catalog-content\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.262195 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-utilities\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.284280 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2jswq\" (UniqueName: \"kubernetes.io/projected/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-kube-api-access-2jswq\") pod \"community-operators-tk9nw\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.353416 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:42 crc kubenswrapper[4922]: I0929 23:33:42.934920 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tk9nw"] Sep 29 23:33:43 crc kubenswrapper[4922]: I0929 23:33:43.214024 4922 generic.go:334] "Generic (PLEG): container finished" podID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerID="5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2" exitCode=0 Sep 29 23:33:43 crc kubenswrapper[4922]: I0929 23:33:43.214093 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9nw" event={"ID":"f030cfa7-fd1a-4b89-86e3-ededbe1090f8","Type":"ContainerDied","Data":"5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2"} Sep 29 23:33:43 crc kubenswrapper[4922]: I0929 23:33:43.214133 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9nw" event={"ID":"f030cfa7-fd1a-4b89-86e3-ededbe1090f8","Type":"ContainerStarted","Data":"d194d7f65efa0b469eb4cb1a416a35f4e3869a86889283b5372c25ad8307f30a"} Sep 29 23:33:44 crc kubenswrapper[4922]: I0929 23:33:44.224503 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9nw" event={"ID":"f030cfa7-fd1a-4b89-86e3-ededbe1090f8","Type":"ContainerStarted","Data":"9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364"} Sep 29 23:33:45 crc kubenswrapper[4922]: I0929 23:33:45.240327 4922 generic.go:334] "Generic (PLEG): container finished" podID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerID="9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364" exitCode=0 Sep 29 23:33:45 crc kubenswrapper[4922]: I0929 23:33:45.240417 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9nw" event={"ID":"f030cfa7-fd1a-4b89-86e3-ededbe1090f8","Type":"ContainerDied","Data":"9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364"} Sep 29 23:33:45 crc kubenswrapper[4922]: I0929 23:33:45.422669 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:33:45 crc kubenswrapper[4922]: E0929 23:33:45.423050 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:33:46 crc kubenswrapper[4922]: I0929 23:33:46.257130 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9nw" event={"ID":"f030cfa7-fd1a-4b89-86e3-ededbe1090f8","Type":"ContainerStarted","Data":"58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c"} Sep 29 23:33:46 crc kubenswrapper[4922]: I0929 23:33:46.286126 4922 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tk9nw" podStartSLOduration=2.833447147 podStartE2EDuration="5.286104263s" podCreationTimestamp="2025-09-29 23:33:41 +0000 UTC" firstStartedPulling="2025-09-29 23:33:43.21615245 +0000 UTC m=+4027.526441293" lastFinishedPulling="2025-09-29 23:33:45.668809566 +0000 UTC m=+4029.979098409" observedRunningTime="2025-09-29 23:33:46.28316774 +0000 UTC m=+4030.593456553" watchObservedRunningTime="2025-09-29 23:33:46.286104263 +0000 UTC m=+4030.596393076" Sep 29 23:33:52 crc kubenswrapper[4922]: I0929 23:33:52.353644 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:52 crc kubenswrapper[4922]: I0929 23:33:52.354737 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:52 crc kubenswrapper[4922]: I0929 23:33:52.439356 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:53 crc kubenswrapper[4922]: I0929 23:33:53.411351 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:53 crc kubenswrapper[4922]: I0929 23:33:53.477138 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tk9nw"] Sep 29 23:33:55 crc kubenswrapper[4922]: I0929 23:33:55.356333 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tk9nw" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="registry-server" containerID="cri-o://58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c" gracePeriod=2 Sep 29 23:33:55 crc kubenswrapper[4922]: I0929 23:33:55.830726 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:55 crc kubenswrapper[4922]: I0929 23:33:55.907939 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-catalog-content\") pod \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " Sep 29 23:33:55 crc kubenswrapper[4922]: I0929 23:33:55.908038 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jswq\" (UniqueName: \"kubernetes.io/projected/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-kube-api-access-2jswq\") pod \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " Sep 29 23:33:55 crc kubenswrapper[4922]: I0929 23:33:55.908127 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-utilities\") pod \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\" (UID: \"f030cfa7-fd1a-4b89-86e3-ededbe1090f8\") " Sep 29 23:33:55 crc kubenswrapper[4922]: I0929 23:33:55.910005 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-utilities" (OuterVolumeSpecName: "utilities") pod "f030cfa7-fd1a-4b89-86e3-ededbe1090f8" (UID: "f030cfa7-fd1a-4b89-86e3-ededbe1090f8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:33:55 crc kubenswrapper[4922]: I0929 23:33:55.916510 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-kube-api-access-2jswq" (OuterVolumeSpecName: "kube-api-access-2jswq") pod "f030cfa7-fd1a-4b89-86e3-ededbe1090f8" (UID: "f030cfa7-fd1a-4b89-86e3-ededbe1090f8"). InnerVolumeSpecName "kube-api-access-2jswq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.009549 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jswq\" (UniqueName: \"kubernetes.io/projected/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-kube-api-access-2jswq\") on node \"crc\" DevicePath \"\"" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.009598 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.010726 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f030cfa7-fd1a-4b89-86e3-ededbe1090f8" (UID: "f030cfa7-fd1a-4b89-86e3-ededbe1090f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.110898 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f030cfa7-fd1a-4b89-86e3-ededbe1090f8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.373100 4922 generic.go:334] "Generic (PLEG): container finished" podID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerID="58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c" exitCode=0 Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.373177 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9nw" event={"ID":"f030cfa7-fd1a-4b89-86e3-ededbe1090f8","Type":"ContainerDied","Data":"58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c"} Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.373233 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tk9nw" event={"ID":"f030cfa7-fd1a-4b89-86e3-ededbe1090f8","Type":"ContainerDied","Data":"d194d7f65efa0b469eb4cb1a416a35f4e3869a86889283b5372c25ad8307f30a"} Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.373264 4922 scope.go:117] "RemoveContainer" containerID="58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.373332 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tk9nw" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.404368 4922 scope.go:117] "RemoveContainer" containerID="9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.440899 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:33:56 crc kubenswrapper[4922]: E0929 23:33:56.441326 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.461215 4922 scope.go:117] "RemoveContainer" containerID="5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.466630 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tk9nw"] Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.466692 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tk9nw"] Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.487362 4922 scope.go:117] "RemoveContainer" containerID="58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c" Sep 29 23:33:56 crc kubenswrapper[4922]: E0929 23:33:56.488063 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c\": container with ID starting with 58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c not found: ID does not exist" containerID="58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.488144 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c"} err="failed to get container status \"58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c\": rpc error: code = NotFound desc = could not find container \"58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c\": container with ID starting with 58551ceffaec775f508600c43fdce3e1730031eb616224acaf1730444b13e24c not found: ID does not exist" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.488181 4922 scope.go:117] "RemoveContainer" containerID="9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364" Sep 29 23:33:56 crc kubenswrapper[4922]: E0929 23:33:56.488718 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364\": container with ID starting with 9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364 not found: ID does not exist" containerID="9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.488824 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364"} err="failed to get container status \"9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364\": rpc error: code = NotFound desc = could not find container \"9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364\": container with ID starting with 9a7e4cae7409791524557e2c33e8618ceb27562f285015528ef9ddbee39cc364 not found: ID does not exist" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.488936 4922 scope.go:117] "RemoveContainer" containerID="5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2" Sep 29 23:33:56 crc kubenswrapper[4922]: E0929 23:33:56.489351 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2\": container with ID starting with 5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2 not found: ID does not exist" containerID="5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2" Sep 29 23:33:56 crc kubenswrapper[4922]: I0929 23:33:56.489441 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2"} err="failed to get container status \"5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2\": rpc error: code = NotFound desc = could not find container \"5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2\": container with ID starting with 5a7a3b71938a4110adc570dd7ee6d7f0ddbd8a01be946d30ddc6026d352cd5f2 not found: ID does not exist" Sep 29 23:33:58 crc kubenswrapper[4922]: I0929 23:33:58.435214 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" path="/var/lib/kubelet/pods/f030cfa7-fd1a-4b89-86e3-ededbe1090f8/volumes" Sep 29 23:34:10 crc kubenswrapper[4922]: I0929 23:34:10.422584 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:34:10 crc kubenswrapper[4922]: E0929 23:34:10.423730 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:34:21 crc kubenswrapper[4922]: I0929 23:34:21.422961 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:34:21 crc kubenswrapper[4922]: E0929 23:34:21.424803 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:34:32 crc kubenswrapper[4922]: I0929 23:34:32.421906 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:34:32 crc kubenswrapper[4922]: E0929 23:34:32.423024 4922 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:34:43 crc kubenswrapper[4922]: I0929 23:34:43.422314 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:34:43 crc kubenswrapper[4922]: E0929 23:34:43.424977 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:34:55 crc kubenswrapper[4922]: I0929 23:34:55.422845 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:34:55 crc kubenswrapper[4922]: E0929 23:34:55.423881 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:35:09 crc kubenswrapper[4922]: I0929 23:35:09.422586 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:35:09 crc kubenswrapper[4922]: E0929 23:35:09.423555 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:35:23 crc kubenswrapper[4922]: I0929 23:35:23.425968 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:35:23 crc kubenswrapper[4922]: E0929 23:35:23.427046 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:35:34 crc kubenswrapper[4922]: I0929 23:35:34.422142 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:35:35 crc kubenswrapper[4922]: I0929 23:35:35.382992 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"dd5d46348d2b4160756fb783a5d08cd46cf0fb1625e2e053deba1ede6799a64b"} Sep 29 23:35:45 crc kubenswrapper[4922]: I0929 23:35:45.954647 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m6rw5"] Sep 29 23:35:45 crc kubenswrapper[4922]: E0929 23:35:45.956100 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="extract-utilities" Sep 29 23:35:45 crc kubenswrapper[4922]: I0929 23:35:45.956131 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="extract-utilities" Sep 29 23:35:45 crc kubenswrapper[4922]: E0929 23:35:45.956176 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="extract-content" Sep 29 23:35:45 crc kubenswrapper[4922]: I0929 23:35:45.956192 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="extract-content" Sep 29 23:35:45 crc kubenswrapper[4922]: E0929 23:35:45.956226 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="registry-server" Sep 29 23:35:45 crc kubenswrapper[4922]: I0929 23:35:45.956242 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="registry-server" Sep 29 23:35:45 crc kubenswrapper[4922]: I0929 23:35:45.956574 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f030cfa7-fd1a-4b89-86e3-ededbe1090f8" containerName="registry-server" Sep 29 23:35:45 crc kubenswrapper[4922]: I0929 23:35:45.958913 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:45 crc kubenswrapper[4922]: I0929 23:35:45.985506 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6rw5"] Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.137993 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-utilities\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.138690 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m5h8\" (UniqueName: \"kubernetes.io/projected/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-kube-api-access-2m5h8\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.139153 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-catalog-content\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.241309 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m5h8\" (UniqueName: \"kubernetes.io/projected/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-kube-api-access-2m5h8\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.242150 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-catalog-content\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.242756 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-catalog-content\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.243422 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-utilities\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.243072 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-utilities\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.267255 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-2m5h8\" (UniqueName: \"kubernetes.io/projected/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-kube-api-access-2m5h8\") pod \"redhat-marketplace-m6rw5\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.299564 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:46 crc kubenswrapper[4922]: I0929 23:35:46.788012 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6rw5"] Sep 29 23:35:47 crc kubenswrapper[4922]: I0929 23:35:47.529587 4922 generic.go:334] "Generic (PLEG): container finished" podID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerID="0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea" exitCode=0 Sep 29 23:35:47 crc kubenswrapper[4922]: I0929 23:35:47.529791 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6rw5" event={"ID":"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90","Type":"ContainerDied","Data":"0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea"} Sep 29 23:35:47 crc kubenswrapper[4922]: I0929 23:35:47.529966 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6rw5" event={"ID":"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90","Type":"ContainerStarted","Data":"d4b82989b21af7864b23afd66bc03a8440fdf35bded4b36520bfde8828b817f1"} Sep 29 23:35:48 crc kubenswrapper[4922]: I0929 23:35:48.559020 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6rw5" event={"ID":"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90","Type":"ContainerStarted","Data":"bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169"} Sep 29 23:35:49 crc kubenswrapper[4922]: I0929 23:35:49.570303 4922 generic.go:334] "Generic (PLEG): container finished" podID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerID="bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169" exitCode=0 Sep 29 23:35:49 crc kubenswrapper[4922]: I0929 23:35:49.570373 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6rw5" event={"ID":"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90","Type":"ContainerDied","Data":"bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169"} Sep 29 23:35:50 crc kubenswrapper[4922]: I0929 23:35:50.593258 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6rw5" event={"ID":"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90","Type":"ContainerStarted","Data":"5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c"} Sep 29 23:35:50 crc kubenswrapper[4922]: I0929 23:35:50.621462 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m6rw5" podStartSLOduration=3.156595863 podStartE2EDuration="5.621441822s" podCreationTimestamp="2025-09-29 23:35:45 +0000 UTC" firstStartedPulling="2025-09-29 23:35:47.532021287 +0000 UTC m=+4151.842310140" lastFinishedPulling="2025-09-29 23:35:49.996867246 +0000 UTC m=+4154.307156099" observedRunningTime="2025-09-29 23:35:50.62015838 +0000 UTC m=+4154.930447203" watchObservedRunningTime="2025-09-29 23:35:50.621441822 +0000 UTC m=+4154.931730645" Sep 29 23:35:56 crc kubenswrapper[4922]: I0929 23:35:56.300359 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:56 crc kubenswrapper[4922]: I0929 23:35:56.303463 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:56 crc kubenswrapper[4922]: I0929 23:35:56.388355 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:56 crc kubenswrapper[4922]: I0929 23:35:56.751816 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:56 crc kubenswrapper[4922]: I0929 23:35:56.847323 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6rw5"] Sep 29 23:35:58 crc kubenswrapper[4922]: I0929 23:35:58.688950 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m6rw5" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="registry-server" containerID="cri-o://5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c" gracePeriod=2 Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.205597 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.279979 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-utilities\") pod \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.280239 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m5h8\" (UniqueName: \"kubernetes.io/projected/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-kube-api-access-2m5h8\") pod \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.280269 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-catalog-content\") pod \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\" (UID: \"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90\") " Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.286488 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-kube-api-access-2m5h8" (OuterVolumeSpecName: "kube-api-access-2m5h8") pod "ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" (UID: "ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90"). InnerVolumeSpecName "kube-api-access-2m5h8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.286573 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-utilities" (OuterVolumeSpecName: "utilities") pod "ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" (UID: "ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.302491 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" (UID: "ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.383748 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.383844 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m5h8\" (UniqueName: \"kubernetes.io/projected/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-kube-api-access-2m5h8\") on node \"crc\" DevicePath \"\"" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.383876 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.707364 4922 generic.go:334] "Generic (PLEG): container finished" podID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerID="5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c" exitCode=0 Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.707477 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6rw5" event={"ID":"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90","Type":"ContainerDied","Data":"5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c"} Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.707535 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m6rw5" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.707582 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m6rw5" event={"ID":"ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90","Type":"ContainerDied","Data":"d4b82989b21af7864b23afd66bc03a8440fdf35bded4b36520bfde8828b817f1"} Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.707620 4922 scope.go:117] "RemoveContainer" containerID="5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.752214 4922 scope.go:117] "RemoveContainer" containerID="bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.769697 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6rw5"] Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.778330 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m6rw5"] Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.793282 4922 scope.go:117] "RemoveContainer" containerID="0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.833055 4922 scope.go:117] "RemoveContainer" containerID="5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c" Sep 29 23:35:59 crc kubenswrapper[4922]: E0929 23:35:59.834142 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c\": container with ID starting with 5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c not found: ID does not exist" containerID="5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.834208 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c"} err="failed to get container status \"5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c\": rpc error: code = NotFound desc = could not find container \"5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c\": container with ID starting with 5e05d36f0db794c50143fe249cdef80147289f9e5a713a0555f7cac8e0227e5c not found: ID does not exist" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.834253 4922 scope.go:117] "RemoveContainer" containerID="bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169" Sep 29 23:35:59 crc kubenswrapper[4922]: E0929 23:35:59.834891 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169\": container with ID starting with bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169 not found: ID does not exist" containerID="bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.834983 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169"} err="failed to get container status \"bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169\": rpc error: code = NotFound desc = could not find 
container \"bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169\": container with ID starting with bf1492e1ed3c1f384b0e6eb8235fceeb872e8579cec4f414e9614144f380d169 not found: ID does not exist" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.835023 4922 scope.go:117] "RemoveContainer" containerID="0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea" Sep 29 23:35:59 crc kubenswrapper[4922]: E0929 23:35:59.835632 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea\": container with ID starting with 0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea not found: ID does not exist" containerID="0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea" Sep 29 23:35:59 crc kubenswrapper[4922]: I0929 23:35:59.835762 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea"} err="failed to get container status \"0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea\": rpc error: code = NotFound desc = could not find container \"0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea\": container with ID starting with 0362bf477edc1cae3359e2951879f454c60ece310d20f1ff3bb35760b1cfa9ea not found: ID does not exist" Sep 29 23:36:00 crc kubenswrapper[4922]: I0929 23:36:00.441169 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" path="/var/lib/kubelet/pods/ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90/volumes" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.710301 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xkxls"] Sep 29 23:37:14 crc kubenswrapper[4922]: E0929 23:37:14.711383 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="extract-content" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.711434 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="extract-content" Sep 29 23:37:14 crc kubenswrapper[4922]: E0929 23:37:14.711469 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="registry-server" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.711481 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="registry-server" Sep 29 23:37:14 crc kubenswrapper[4922]: E0929 23:37:14.711519 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="extract-utilities" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.711532 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="extract-utilities" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.711782 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ede73bd3-1a1a-45f1-bb9b-2ea752ffdc90" containerName="registry-server" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.715196 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.728631 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xkxls"] Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.830585 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-utilities\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.830648 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-catalog-content\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.831207 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2f8g\" (UniqueName: \"kubernetes.io/projected/0a7df9ae-0011-485d-a11b-3fbec7c46308-kube-api-access-k2f8g\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.932415 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-utilities\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.932483 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-catalog-content\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.932578 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2f8g\" (UniqueName: \"kubernetes.io/projected/0a7df9ae-0011-485d-a11b-3fbec7c46308-kube-api-access-k2f8g\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.933114 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-utilities\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.933161 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-catalog-content\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:14 crc kubenswrapper[4922]: I0929 23:37:14.958856 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-k2f8g\" (UniqueName: \"kubernetes.io/projected/0a7df9ae-0011-485d-a11b-3fbec7c46308-kube-api-access-k2f8g\") pod \"certified-operators-xkxls\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:15 crc kubenswrapper[4922]: I0929 23:37:15.087785 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:15 crc kubenswrapper[4922]: I0929 23:37:15.579769 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xkxls"] Sep 29 23:37:15 crc kubenswrapper[4922]: W0929 23:37:15.583280 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a7df9ae_0011_485d_a11b_3fbec7c46308.slice/crio-324febcd76a4a26542489296cc2f9f3911b4bb90c8e3c5233c6634944d5dd2ad WatchSource:0}: Error finding container 324febcd76a4a26542489296cc2f9f3911b4bb90c8e3c5233c6634944d5dd2ad: Status 404 returned error can't find the container with id 324febcd76a4a26542489296cc2f9f3911b4bb90c8e3c5233c6634944d5dd2ad Sep 29 23:37:16 crc kubenswrapper[4922]: I0929 23:37:16.471706 4922 generic.go:334] "Generic (PLEG): container finished" podID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerID="e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae" exitCode=0 Sep 29 23:37:16 crc kubenswrapper[4922]: I0929 23:37:16.471789 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xkxls" event={"ID":"0a7df9ae-0011-485d-a11b-3fbec7c46308","Type":"ContainerDied","Data":"e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae"} Sep 29 23:37:16 crc kubenswrapper[4922]: I0929 23:37:16.471929 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xkxls" event={"ID":"0a7df9ae-0011-485d-a11b-3fbec7c46308","Type":"ContainerStarted","Data":"324febcd76a4a26542489296cc2f9f3911b4bb90c8e3c5233c6634944d5dd2ad"} Sep 29 23:37:17 crc kubenswrapper[4922]: I0929 23:37:17.483907 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xkxls" event={"ID":"0a7df9ae-0011-485d-a11b-3fbec7c46308","Type":"ContainerStarted","Data":"e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139"} Sep 29 23:37:18 crc kubenswrapper[4922]: I0929 23:37:18.499149 4922 generic.go:334] "Generic (PLEG): container finished" podID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerID="e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139" exitCode=0 Sep 29 23:37:18 crc kubenswrapper[4922]: I0929 23:37:18.499286 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xkxls" event={"ID":"0a7df9ae-0011-485d-a11b-3fbec7c46308","Type":"ContainerDied","Data":"e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139"} Sep 29 23:37:19 crc kubenswrapper[4922]: I0929 23:37:19.512993 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xkxls" event={"ID":"0a7df9ae-0011-485d-a11b-3fbec7c46308","Type":"ContainerStarted","Data":"92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294"} Sep 29 23:37:19 crc kubenswrapper[4922]: I0929 23:37:19.549275 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xkxls" 
podStartSLOduration=3.123301103 podStartE2EDuration="5.549248649s" podCreationTimestamp="2025-09-29 23:37:14 +0000 UTC" firstStartedPulling="2025-09-29 23:37:16.47466123 +0000 UTC m=+4240.784950053" lastFinishedPulling="2025-09-29 23:37:18.900608746 +0000 UTC m=+4243.210897599" observedRunningTime="2025-09-29 23:37:19.541700752 +0000 UTC m=+4243.851989605" watchObservedRunningTime="2025-09-29 23:37:19.549248649 +0000 UTC m=+4243.859537502" Sep 29 23:37:25 crc kubenswrapper[4922]: I0929 23:37:25.089103 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:25 crc kubenswrapper[4922]: I0929 23:37:25.089647 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:25 crc kubenswrapper[4922]: I0929 23:37:25.171717 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:25 crc kubenswrapper[4922]: I0929 23:37:25.648214 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:25 crc kubenswrapper[4922]: I0929 23:37:25.750944 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xkxls"] Sep 29 23:37:27 crc kubenswrapper[4922]: I0929 23:37:27.594722 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xkxls" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="registry-server" containerID="cri-o://92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294" gracePeriod=2 Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.604716 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.606821 4922 generic.go:334] "Generic (PLEG): container finished" podID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerID="92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294" exitCode=0 Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.606867 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xkxls" event={"ID":"0a7df9ae-0011-485d-a11b-3fbec7c46308","Type":"ContainerDied","Data":"92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294"} Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.606896 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xkxls" event={"ID":"0a7df9ae-0011-485d-a11b-3fbec7c46308","Type":"ContainerDied","Data":"324febcd76a4a26542489296cc2f9f3911b4bb90c8e3c5233c6634944d5dd2ad"} Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.606918 4922 scope.go:117] "RemoveContainer" containerID="92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.635724 4922 scope.go:117] "RemoveContainer" containerID="e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.666941 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-utilities\") pod \"0a7df9ae-0011-485d-a11b-3fbec7c46308\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.667237 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2f8g\" (UniqueName: \"kubernetes.io/projected/0a7df9ae-0011-485d-a11b-3fbec7c46308-kube-api-access-k2f8g\") pod \"0a7df9ae-0011-485d-a11b-3fbec7c46308\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.667487 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-catalog-content\") pod \"0a7df9ae-0011-485d-a11b-3fbec7c46308\" (UID: \"0a7df9ae-0011-485d-a11b-3fbec7c46308\") " Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.688523 4922 scope.go:117] "RemoveContainer" containerID="e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.689315 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-utilities" (OuterVolumeSpecName: "utilities") pod "0a7df9ae-0011-485d-a11b-3fbec7c46308" (UID: "0a7df9ae-0011-485d-a11b-3fbec7c46308"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.704599 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a7df9ae-0011-485d-a11b-3fbec7c46308-kube-api-access-k2f8g" (OuterVolumeSpecName: "kube-api-access-k2f8g") pod "0a7df9ae-0011-485d-a11b-3fbec7c46308" (UID: "0a7df9ae-0011-485d-a11b-3fbec7c46308"). InnerVolumeSpecName "kube-api-access-k2f8g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.750810 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0a7df9ae-0011-485d-a11b-3fbec7c46308" (UID: "0a7df9ae-0011-485d-a11b-3fbec7c46308"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.754855 4922 scope.go:117] "RemoveContainer" containerID="92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294" Sep 29 23:37:28 crc kubenswrapper[4922]: E0929 23:37:28.755163 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294\": container with ID starting with 92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294 not found: ID does not exist" containerID="92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.755202 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294"} err="failed to get container status \"92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294\": rpc error: code = NotFound desc = could not find container \"92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294\": container with ID starting with 92669e10cca116818e709c7840385a773a49e7bd2b80ae2c1fc4b756d1b1f294 not found: ID does not exist" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.755227 4922 scope.go:117] "RemoveContainer" containerID="e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139" Sep 29 23:37:28 crc kubenswrapper[4922]: E0929 23:37:28.755702 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139\": container with ID starting with e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139 not found: ID does not exist" containerID="e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.755733 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139"} err="failed to get container status \"e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139\": rpc error: code = NotFound desc = could not find container \"e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139\": container with ID starting with e66e8e64feeb60170568b1df11d479ee25311e2c5aa6e3414748090b24ad1139 not found: ID does not exist" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.755755 4922 scope.go:117] "RemoveContainer" containerID="e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae" Sep 29 23:37:28 crc kubenswrapper[4922]: E0929 23:37:28.756030 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae\": container with ID starting with e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae not found: ID does not exist" 
containerID="e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.756057 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae"} err="failed to get container status \"e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae\": rpc error: code = NotFound desc = could not find container \"e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae\": container with ID starting with e6dfcd2b7071d0b7514ec8848eae2fca822dd5e4df83dfe49079fbb21560aaae not found: ID does not exist" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.770554 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.770580 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2f8g\" (UniqueName: \"kubernetes.io/projected/0a7df9ae-0011-485d-a11b-3fbec7c46308-kube-api-access-k2f8g\") on node \"crc\" DevicePath \"\"" Sep 29 23:37:28 crc kubenswrapper[4922]: I0929 23:37:28.770594 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a7df9ae-0011-485d-a11b-3fbec7c46308-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:37:29 crc kubenswrapper[4922]: I0929 23:37:29.622185 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xkxls" Sep 29 23:37:29 crc kubenswrapper[4922]: I0929 23:37:29.670948 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xkxls"] Sep 29 23:37:29 crc kubenswrapper[4922]: I0929 23:37:29.680518 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xkxls"] Sep 29 23:37:30 crc kubenswrapper[4922]: I0929 23:37:30.440457 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" path="/var/lib/kubelet/pods/0a7df9ae-0011-485d-a11b-3fbec7c46308/volumes" Sep 29 23:37:58 crc kubenswrapper[4922]: I0929 23:37:58.913029 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:37:58 crc kubenswrapper[4922]: I0929 23:37:58.913734 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:38:28 crc kubenswrapper[4922]: I0929 23:38:28.913161 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:38:28 crc kubenswrapper[4922]: I0929 23:38:28.913940 4922 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:38:58 crc kubenswrapper[4922]: I0929 23:38:58.912621 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:38:58 crc kubenswrapper[4922]: I0929 23:38:58.913513 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:38:58 crc kubenswrapper[4922]: I0929 23:38:58.913748 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:38:58 crc kubenswrapper[4922]: I0929 23:38:58.914631 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dd5d46348d2b4160756fb783a5d08cd46cf0fb1625e2e053deba1ede6799a64b"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:38:58 crc kubenswrapper[4922]: I0929 23:38:58.914713 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://dd5d46348d2b4160756fb783a5d08cd46cf0fb1625e2e053deba1ede6799a64b" gracePeriod=600 Sep 29 23:38:59 crc kubenswrapper[4922]: I0929 23:38:59.583697 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="dd5d46348d2b4160756fb783a5d08cd46cf0fb1625e2e053deba1ede6799a64b" exitCode=0 Sep 29 23:38:59 crc kubenswrapper[4922]: I0929 23:38:59.584602 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"dd5d46348d2b4160756fb783a5d08cd46cf0fb1625e2e053deba1ede6799a64b"} Sep 29 23:38:59 crc kubenswrapper[4922]: I0929 23:38:59.584676 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca"} Sep 29 23:38:59 crc kubenswrapper[4922]: I0929 23:38:59.584710 4922 scope.go:117] "RemoveContainer" containerID="fa8693bbb056a0c28841710b82831767981742bad2f71cf14a4282f92cebf58e" Sep 29 23:41:28 crc kubenswrapper[4922]: I0929 23:41:28.913669 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:41:28 crc 
kubenswrapper[4922]: I0929 23:41:28.914680 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.735922 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-rrsl6"] Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.745009 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-rrsl6"] Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.873883 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-n2wfq"] Sep 29 23:41:47 crc kubenswrapper[4922]: E0929 23:41:47.874328 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="registry-server" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.874357 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="registry-server" Sep 29 23:41:47 crc kubenswrapper[4922]: E0929 23:41:47.874387 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="extract-content" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.874435 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="extract-content" Sep 29 23:41:47 crc kubenswrapper[4922]: E0929 23:41:47.874468 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="extract-utilities" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.874486 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="extract-utilities" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.874748 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a7df9ae-0011-485d-a11b-3fbec7c46308" containerName="registry-server" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.875790 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.879497 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.879723 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.879787 4922 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-w4jls" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.880060 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Sep 29 23:41:47 crc kubenswrapper[4922]: I0929 23:41:47.887613 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-n2wfq"] Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.034654 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtq4s\" (UniqueName: \"kubernetes.io/projected/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-kube-api-access-xtq4s\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.035053 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-node-mnt\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.035166 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-crc-storage\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.137120 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtq4s\" (UniqueName: \"kubernetes.io/projected/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-kube-api-access-xtq4s\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.137249 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-node-mnt\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.137300 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-crc-storage\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.137708 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-node-mnt\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " 
pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.138246 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-crc-storage\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.170438 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtq4s\" (UniqueName: \"kubernetes.io/projected/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-kube-api-access-xtq4s\") pod \"crc-storage-crc-n2wfq\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.222172 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.437734 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85" path="/var/lib/kubelet/pods/74e678e6-0b3d-4f5f-b8f5-2e3fa69eab85/volumes" Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.740019 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-n2wfq"] Sep 29 23:41:48 crc kubenswrapper[4922]: I0929 23:41:48.756599 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 23:41:49 crc kubenswrapper[4922]: I0929 23:41:49.283717 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n2wfq" event={"ID":"e0cfedb1-5d20-4910-8cac-8f3d0a609dea","Type":"ContainerStarted","Data":"12c91c18e8ad2ea5264b22be8d7b690a8a0d74005e610f5a91105e30de2ee14f"} Sep 29 23:41:50 crc kubenswrapper[4922]: I0929 23:41:50.290300 4922 generic.go:334] "Generic (PLEG): container finished" podID="e0cfedb1-5d20-4910-8cac-8f3d0a609dea" containerID="b9919a7da1b8d3055262e0f45c29ec6337384e5add24e1ebc579e6b70f13b5c9" exitCode=0 Sep 29 23:41:50 crc kubenswrapper[4922]: I0929 23:41:50.290585 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n2wfq" event={"ID":"e0cfedb1-5d20-4910-8cac-8f3d0a609dea","Type":"ContainerDied","Data":"b9919a7da1b8d3055262e0f45c29ec6337384e5add24e1ebc579e6b70f13b5c9"} Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.483073 4922 scope.go:117] "RemoveContainer" containerID="e5a6270e8307b919f32a818b901ebb752e448dbb0d51061f483347e948fddfd9" Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.692296 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.809187 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-crc-storage\") pod \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.809252 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-node-mnt\") pod \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.809324 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtq4s\" (UniqueName: \"kubernetes.io/projected/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-kube-api-access-xtq4s\") pod \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\" (UID: \"e0cfedb1-5d20-4910-8cac-8f3d0a609dea\") " Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.810369 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "e0cfedb1-5d20-4910-8cac-8f3d0a609dea" (UID: "e0cfedb1-5d20-4910-8cac-8f3d0a609dea"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.825923 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-kube-api-access-xtq4s" (OuterVolumeSpecName: "kube-api-access-xtq4s") pod "e0cfedb1-5d20-4910-8cac-8f3d0a609dea" (UID: "e0cfedb1-5d20-4910-8cac-8f3d0a609dea"). InnerVolumeSpecName "kube-api-access-xtq4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.843876 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "e0cfedb1-5d20-4910-8cac-8f3d0a609dea" (UID: "e0cfedb1-5d20-4910-8cac-8f3d0a609dea"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.910882 4922 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-crc-storage\") on node \"crc\" DevicePath \"\"" Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.910921 4922 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-node-mnt\") on node \"crc\" DevicePath \"\"" Sep 29 23:41:51 crc kubenswrapper[4922]: I0929 23:41:51.910930 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtq4s\" (UniqueName: \"kubernetes.io/projected/e0cfedb1-5d20-4910-8cac-8f3d0a609dea-kube-api-access-xtq4s\") on node \"crc\" DevicePath \"\"" Sep 29 23:41:52 crc kubenswrapper[4922]: I0929 23:41:52.310381 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-n2wfq" event={"ID":"e0cfedb1-5d20-4910-8cac-8f3d0a609dea","Type":"ContainerDied","Data":"12c91c18e8ad2ea5264b22be8d7b690a8a0d74005e610f5a91105e30de2ee14f"} Sep 29 23:41:52 crc kubenswrapper[4922]: I0929 23:41:52.310510 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12c91c18e8ad2ea5264b22be8d7b690a8a0d74005e610f5a91105e30de2ee14f" Sep 29 23:41:52 crc kubenswrapper[4922]: I0929 23:41:52.310463 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-n2wfq" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.287591 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-n2wfq"] Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.298652 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-n2wfq"] Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.461588 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0cfedb1-5d20-4910-8cac-8f3d0a609dea" path="/var/lib/kubelet/pods/e0cfedb1-5d20-4910-8cac-8f3d0a609dea/volumes" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.476907 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-2g927"] Sep 29 23:41:54 crc kubenswrapper[4922]: E0929 23:41:54.477249 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0cfedb1-5d20-4910-8cac-8f3d0a609dea" containerName="storage" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.477271 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0cfedb1-5d20-4910-8cac-8f3d0a609dea" containerName="storage" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.477427 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0cfedb1-5d20-4910-8cac-8f3d0a609dea" containerName="storage" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.477984 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.481162 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.481265 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.481669 4922 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-w4jls" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.481754 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.502675 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2g927"] Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.655559 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-crc-storage\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.655606 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtqpj\" (UniqueName: \"kubernetes.io/projected/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-kube-api-access-dtqpj\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.655800 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-node-mnt\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.756818 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-crc-storage\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.756860 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtqpj\" (UniqueName: \"kubernetes.io/projected/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-kube-api-access-dtqpj\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.756914 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-node-mnt\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.757107 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-node-mnt\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " 
pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.757671 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-crc-storage\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.773426 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtqpj\" (UniqueName: \"kubernetes.io/projected/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-kube-api-access-dtqpj\") pod \"crc-storage-crc-2g927\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:54 crc kubenswrapper[4922]: I0929 23:41:54.865724 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:55 crc kubenswrapper[4922]: I0929 23:41:55.416688 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2g927"] Sep 29 23:41:56 crc kubenswrapper[4922]: I0929 23:41:56.353881 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2g927" event={"ID":"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1","Type":"ContainerStarted","Data":"0a6956a378b1c182ce3bdc4057c11d269e8e6ef79ea6b4b13a76c11902019f31"} Sep 29 23:41:57 crc kubenswrapper[4922]: I0929 23:41:57.366120 4922 generic.go:334] "Generic (PLEG): container finished" podID="2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1" containerID="e0c044d1f628488310b18d20338f9fa4eb74ae306c897fe4321b9de2cda42c96" exitCode=0 Sep 29 23:41:57 crc kubenswrapper[4922]: I0929 23:41:57.366180 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2g927" event={"ID":"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1","Type":"ContainerDied","Data":"e0c044d1f628488310b18d20338f9fa4eb74ae306c897fe4321b9de2cda42c96"} Sep 29 23:41:58 crc kubenswrapper[4922]: I0929 23:41:58.839219 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:41:58 crc kubenswrapper[4922]: I0929 23:41:58.912474 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:41:58 crc kubenswrapper[4922]: I0929 23:41:58.912563 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.029466 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtqpj\" (UniqueName: \"kubernetes.io/projected/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-kube-api-access-dtqpj\") pod \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.029529 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-crc-storage\") pod \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.029687 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-node-mnt\") pod \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\" (UID: \"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1\") " Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.029902 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1" (UID: "2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.030192 4922 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-node-mnt\") on node \"crc\" DevicePath \"\"" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.039615 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-kube-api-access-dtqpj" (OuterVolumeSpecName: "kube-api-access-dtqpj") pod "2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1" (UID: "2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1"). InnerVolumeSpecName "kube-api-access-dtqpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.054066 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1" (UID: "2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.131459 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtqpj\" (UniqueName: \"kubernetes.io/projected/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-kube-api-access-dtqpj\") on node \"crc\" DevicePath \"\"" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.131513 4922 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1-crc-storage\") on node \"crc\" DevicePath \"\"" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.399236 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2g927" event={"ID":"2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1","Type":"ContainerDied","Data":"0a6956a378b1c182ce3bdc4057c11d269e8e6ef79ea6b4b13a76c11902019f31"} Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.399287 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a6956a378b1c182ce3bdc4057c11d269e8e6ef79ea6b4b13a76c11902019f31" Sep 29 23:41:59 crc kubenswrapper[4922]: I0929 23:41:59.399302 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2g927" Sep 29 23:42:28 crc kubenswrapper[4922]: I0929 23:42:28.913478 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:42:28 crc kubenswrapper[4922]: I0929 23:42:28.914420 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:42:28 crc kubenswrapper[4922]: I0929 23:42:28.914508 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:42:28 crc kubenswrapper[4922]: I0929 23:42:28.915668 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:42:28 crc kubenswrapper[4922]: I0929 23:42:28.915793 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" gracePeriod=600 Sep 29 23:42:29 crc kubenswrapper[4922]: E0929 23:42:29.051562 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:42:29 crc kubenswrapper[4922]: I0929 23:42:29.690761 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" exitCode=0 Sep 29 23:42:29 crc kubenswrapper[4922]: I0929 23:42:29.690807 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca"} Sep 29 23:42:29 crc kubenswrapper[4922]: I0929 23:42:29.690874 4922 scope.go:117] "RemoveContainer" containerID="dd5d46348d2b4160756fb783a5d08cd46cf0fb1625e2e053deba1ede6799a64b" Sep 29 23:42:29 crc kubenswrapper[4922]: I0929 23:42:29.691879 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:42:29 crc kubenswrapper[4922]: E0929 23:42:29.692896 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:42:45 crc kubenswrapper[4922]: I0929 23:42:45.422591 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:42:45 crc kubenswrapper[4922]: E0929 23:42:45.423641 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:43:00 crc kubenswrapper[4922]: I0929 23:43:00.422489 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:43:00 crc kubenswrapper[4922]: E0929 23:43:00.423735 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:43:14 crc kubenswrapper[4922]: I0929 23:43:14.421932 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:43:14 crc kubenswrapper[4922]: E0929 23:43:14.422944 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:43:28 crc kubenswrapper[4922]: I0929 
23:43:28.422656 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:43:28 crc kubenswrapper[4922]: E0929 23:43:28.423891 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:43:44 crc kubenswrapper[4922]: I0929 23:43:44.422375 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:43:44 crc kubenswrapper[4922]: E0929 23:43:44.440809 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.023371 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ds6md"] Sep 29 23:43:45 crc kubenswrapper[4922]: E0929 23:43:45.024364 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1" containerName="storage" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.024387 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1" containerName="storage" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.024724 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a7d2edb-ea06-42f7-9f86-a4b8bf8274b1" containerName="storage" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.026688 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.033999 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ds6md"] Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.163797 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2597r\" (UniqueName: \"kubernetes.io/projected/ae34cb57-70b1-4000-b383-a101c76b4f42-kube-api-access-2597r\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.163879 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-catalog-content\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.163920 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-utilities\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.265109 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-catalog-content\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.265228 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-utilities\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.265510 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2597r\" (UniqueName: \"kubernetes.io/projected/ae34cb57-70b1-4000-b383-a101c76b4f42-kube-api-access-2597r\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.265861 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-utilities\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.265867 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-catalog-content\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.298902 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2597r\" (UniqueName: \"kubernetes.io/projected/ae34cb57-70b1-4000-b383-a101c76b4f42-kube-api-access-2597r\") pod \"redhat-operators-ds6md\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.367227 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:45 crc kubenswrapper[4922]: I0929 23:43:45.635924 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ds6md"] Sep 29 23:43:46 crc kubenswrapper[4922]: I0929 23:43:46.443124 4922 generic.go:334] "Generic (PLEG): container finished" podID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerID="cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9" exitCode=0 Sep 29 23:43:46 crc kubenswrapper[4922]: I0929 23:43:46.443278 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ds6md" event={"ID":"ae34cb57-70b1-4000-b383-a101c76b4f42","Type":"ContainerDied","Data":"cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9"} Sep 29 23:43:46 crc kubenswrapper[4922]: I0929 23:43:46.443502 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ds6md" event={"ID":"ae34cb57-70b1-4000-b383-a101c76b4f42","Type":"ContainerStarted","Data":"50e6ceea6c0e2e2e7958ae94cc1781bcd62a517f002018d11eb2da91710d953b"} Sep 29 23:43:48 crc kubenswrapper[4922]: I0929 23:43:48.464617 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ds6md" event={"ID":"ae34cb57-70b1-4000-b383-a101c76b4f42","Type":"ContainerStarted","Data":"fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f"} Sep 29 23:43:49 crc kubenswrapper[4922]: I0929 23:43:49.474351 4922 generic.go:334] "Generic (PLEG): container finished" podID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerID="fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f" exitCode=0 Sep 29 23:43:49 crc kubenswrapper[4922]: I0929 23:43:49.474465 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ds6md" event={"ID":"ae34cb57-70b1-4000-b383-a101c76b4f42","Type":"ContainerDied","Data":"fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f"} Sep 29 23:43:50 crc kubenswrapper[4922]: I0929 23:43:50.487699 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ds6md" event={"ID":"ae34cb57-70b1-4000-b383-a101c76b4f42","Type":"ContainerStarted","Data":"57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d"} Sep 29 23:43:50 crc kubenswrapper[4922]: I0929 23:43:50.523554 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ds6md" podStartSLOduration=3.119763324 podStartE2EDuration="6.523530181s" podCreationTimestamp="2025-09-29 23:43:44 +0000 UTC" firstStartedPulling="2025-09-29 23:43:46.445077758 +0000 UTC m=+4630.755366611" lastFinishedPulling="2025-09-29 23:43:49.848844625 +0000 UTC m=+4634.159133468" observedRunningTime="2025-09-29 23:43:50.517932403 +0000 UTC m=+4634.828221256" watchObservedRunningTime="2025-09-29 23:43:50.523530181 +0000 UTC m=+4634.833819034" Sep 29 23:43:55 crc kubenswrapper[4922]: I0929 23:43:55.367608 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 
23:43:55 crc kubenswrapper[4922]: I0929 23:43:55.369464 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:43:56 crc kubenswrapper[4922]: I0929 23:43:56.434192 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ds6md" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="registry-server" probeResult="failure" output=< Sep 29 23:43:56 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 29 23:43:56 crc kubenswrapper[4922]: > Sep 29 23:43:57 crc kubenswrapper[4922]: I0929 23:43:57.422947 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:43:57 crc kubenswrapper[4922]: E0929 23:43:57.423509 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:44:05 crc kubenswrapper[4922]: I0929 23:44:05.434581 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:44:05 crc kubenswrapper[4922]: I0929 23:44:05.500305 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:44:05 crc kubenswrapper[4922]: I0929 23:44:05.675497 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ds6md"] Sep 29 23:44:06 crc kubenswrapper[4922]: I0929 23:44:06.650354 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ds6md" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="registry-server" containerID="cri-o://57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d" gracePeriod=2 Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.089151 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.152005 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-utilities\") pod \"ae34cb57-70b1-4000-b383-a101c76b4f42\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.152081 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2597r\" (UniqueName: \"kubernetes.io/projected/ae34cb57-70b1-4000-b383-a101c76b4f42-kube-api-access-2597r\") pod \"ae34cb57-70b1-4000-b383-a101c76b4f42\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.153626 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-utilities" (OuterVolumeSpecName: "utilities") pod "ae34cb57-70b1-4000-b383-a101c76b4f42" (UID: "ae34cb57-70b1-4000-b383-a101c76b4f42"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.154050 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-catalog-content\") pod \"ae34cb57-70b1-4000-b383-a101c76b4f42\" (UID: \"ae34cb57-70b1-4000-b383-a101c76b4f42\") " Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.154572 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.163722 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae34cb57-70b1-4000-b383-a101c76b4f42-kube-api-access-2597r" (OuterVolumeSpecName: "kube-api-access-2597r") pod "ae34cb57-70b1-4000-b383-a101c76b4f42" (UID: "ae34cb57-70b1-4000-b383-a101c76b4f42"). InnerVolumeSpecName "kube-api-access-2597r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.254739 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae34cb57-70b1-4000-b383-a101c76b4f42" (UID: "ae34cb57-70b1-4000-b383-a101c76b4f42"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.256021 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2597r\" (UniqueName: \"kubernetes.io/projected/ae34cb57-70b1-4000-b383-a101c76b4f42-kube-api-access-2597r\") on node \"crc\" DevicePath \"\"" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.256057 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae34cb57-70b1-4000-b383-a101c76b4f42-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.662495 4922 generic.go:334] "Generic (PLEG): container finished" podID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerID="57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d" exitCode=0 Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.662550 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ds6md" event={"ID":"ae34cb57-70b1-4000-b383-a101c76b4f42","Type":"ContainerDied","Data":"57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d"} Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.662597 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ds6md" event={"ID":"ae34cb57-70b1-4000-b383-a101c76b4f42","Type":"ContainerDied","Data":"50e6ceea6c0e2e2e7958ae94cc1781bcd62a517f002018d11eb2da91710d953b"} Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.662600 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ds6md" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.662618 4922 scope.go:117] "RemoveContainer" containerID="57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.698013 4922 scope.go:117] "RemoveContainer" containerID="fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.723075 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ds6md"] Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.735636 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ds6md"] Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.749764 4922 scope.go:117] "RemoveContainer" containerID="cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.783118 4922 scope.go:117] "RemoveContainer" containerID="57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d" Sep 29 23:44:07 crc kubenswrapper[4922]: E0929 23:44:07.783641 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d\": container with ID starting with 57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d not found: ID does not exist" containerID="57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.783707 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d"} err="failed to get container status \"57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d\": rpc error: code = NotFound desc = could not find container \"57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d\": container with ID starting with 57d73be32ade5988b62ddae228926387d4ae1ce622515f5c9d3f27d7c54fdf6d not found: ID does not exist" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.783749 4922 scope.go:117] "RemoveContainer" containerID="fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f" Sep 29 23:44:07 crc kubenswrapper[4922]: E0929 23:44:07.784364 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f\": container with ID starting with fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f not found: ID does not exist" containerID="fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.784433 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f"} err="failed to get container status \"fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f\": rpc error: code = NotFound desc = could not find container \"fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f\": container with ID starting with fd92c29164a085f422e6de9d90a386f64787a9d1456cada6012a7d8f272a4a4f not found: ID does not exist" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.784461 4922 scope.go:117] "RemoveContainer" 
containerID="cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9" Sep 29 23:44:07 crc kubenswrapper[4922]: E0929 23:44:07.785119 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9\": container with ID starting with cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9 not found: ID does not exist" containerID="cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9" Sep 29 23:44:07 crc kubenswrapper[4922]: I0929 23:44:07.785366 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9"} err="failed to get container status \"cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9\": rpc error: code = NotFound desc = could not find container \"cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9\": container with ID starting with cab3f5922f67a4adacd94f0e24776510e7ac66713dcb19270ed2602686b769c9 not found: ID does not exist" Sep 29 23:44:08 crc kubenswrapper[4922]: I0929 23:44:08.432623 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" path="/var/lib/kubelet/pods/ae34cb57-70b1-4000-b383-a101c76b4f42/volumes" Sep 29 23:44:09 crc kubenswrapper[4922]: I0929 23:44:09.422711 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:44:09 crc kubenswrapper[4922]: E0929 23:44:09.423094 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.673613 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mldwz"] Sep 29 23:44:17 crc kubenswrapper[4922]: E0929 23:44:17.674587 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="extract-utilities" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.674612 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="extract-utilities" Sep 29 23:44:17 crc kubenswrapper[4922]: E0929 23:44:17.674643 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="registry-server" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.674655 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="registry-server" Sep 29 23:44:17 crc kubenswrapper[4922]: E0929 23:44:17.674677 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="extract-content" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.674687 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="extract-content" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.674877 4922 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ae34cb57-70b1-4000-b383-a101c76b4f42" containerName="registry-server" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.676494 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.690875 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mldwz"] Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.731488 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-catalog-content\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.731578 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-utilities\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.731610 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsnq7\" (UniqueName: \"kubernetes.io/projected/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-kube-api-access-vsnq7\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.833776 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-catalog-content\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.833866 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-utilities\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.833910 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsnq7\" (UniqueName: \"kubernetes.io/projected/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-kube-api-access-vsnq7\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.834370 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-catalog-content\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.834432 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-utilities\") pod \"community-operators-mldwz\" (UID: 
\"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:17 crc kubenswrapper[4922]: I0929 23:44:17.862240 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsnq7\" (UniqueName: \"kubernetes.io/projected/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-kube-api-access-vsnq7\") pod \"community-operators-mldwz\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:18 crc kubenswrapper[4922]: I0929 23:44:18.009575 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:18 crc kubenswrapper[4922]: I0929 23:44:18.559298 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mldwz"] Sep 29 23:44:18 crc kubenswrapper[4922]: W0929 23:44:18.563758 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f6c597e_a5bc_4dd8_8de9_30608f01bd82.slice/crio-67df8df0f8937f7e28f2031964d3b0562cc28a20a6169464b47d3a5c6952f68f WatchSource:0}: Error finding container 67df8df0f8937f7e28f2031964d3b0562cc28a20a6169464b47d3a5c6952f68f: Status 404 returned error can't find the container with id 67df8df0f8937f7e28f2031964d3b0562cc28a20a6169464b47d3a5c6952f68f Sep 29 23:44:18 crc kubenswrapper[4922]: I0929 23:44:18.776834 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerStarted","Data":"cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913"} Sep 29 23:44:18 crc kubenswrapper[4922]: I0929 23:44:18.777182 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerStarted","Data":"67df8df0f8937f7e28f2031964d3b0562cc28a20a6169464b47d3a5c6952f68f"} Sep 29 23:44:19 crc kubenswrapper[4922]: I0929 23:44:19.791229 4922 generic.go:334] "Generic (PLEG): container finished" podID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerID="cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913" exitCode=0 Sep 29 23:44:19 crc kubenswrapper[4922]: I0929 23:44:19.791319 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerDied","Data":"cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913"} Sep 29 23:44:20 crc kubenswrapper[4922]: I0929 23:44:20.844828 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerStarted","Data":"bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41"} Sep 29 23:44:21 crc kubenswrapper[4922]: I0929 23:44:21.856381 4922 generic.go:334] "Generic (PLEG): container finished" podID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerID="bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41" exitCode=0 Sep 29 23:44:21 crc kubenswrapper[4922]: I0929 23:44:21.856471 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerDied","Data":"bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41"} Sep 29 23:44:22 crc 
kubenswrapper[4922]: I0929 23:44:22.421823 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:44:22 crc kubenswrapper[4922]: E0929 23:44:22.422692 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:44:22 crc kubenswrapper[4922]: I0929 23:44:22.865740 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerStarted","Data":"d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c"} Sep 29 23:44:28 crc kubenswrapper[4922]: I0929 23:44:28.010502 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:28 crc kubenswrapper[4922]: I0929 23:44:28.010932 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:28 crc kubenswrapper[4922]: I0929 23:44:28.085801 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:28 crc kubenswrapper[4922]: I0929 23:44:28.124661 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mldwz" podStartSLOduration=8.541708376 podStartE2EDuration="11.124635339s" podCreationTimestamp="2025-09-29 23:44:17 +0000 UTC" firstStartedPulling="2025-09-29 23:44:19.794017389 +0000 UTC m=+4664.104306232" lastFinishedPulling="2025-09-29 23:44:22.376944352 +0000 UTC m=+4666.687233195" observedRunningTime="2025-09-29 23:44:22.893104856 +0000 UTC m=+4667.203393709" watchObservedRunningTime="2025-09-29 23:44:28.124635339 +0000 UTC m=+4672.434924192" Sep 29 23:44:29 crc kubenswrapper[4922]: I0929 23:44:29.030495 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:29 crc kubenswrapper[4922]: I0929 23:44:29.101621 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mldwz"] Sep 29 23:44:30 crc kubenswrapper[4922]: I0929 23:44:30.936023 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mldwz" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="registry-server" containerID="cri-o://d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c" gracePeriod=2 Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.905301 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.949507 4922 generic.go:334] "Generic (PLEG): container finished" podID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerID="d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c" exitCode=0 Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.949570 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerDied","Data":"d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c"} Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.949586 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mldwz" Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.949607 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mldwz" event={"ID":"7f6c597e-a5bc-4dd8-8de9-30608f01bd82","Type":"ContainerDied","Data":"67df8df0f8937f7e28f2031964d3b0562cc28a20a6169464b47d3a5c6952f68f"} Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.949642 4922 scope.go:117] "RemoveContainer" containerID="d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c" Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.958055 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-catalog-content\") pod \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.958137 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsnq7\" (UniqueName: \"kubernetes.io/projected/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-kube-api-access-vsnq7\") pod \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.958228 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-utilities\") pod \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\" (UID: \"7f6c597e-a5bc-4dd8-8de9-30608f01bd82\") " Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.959515 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-utilities" (OuterVolumeSpecName: "utilities") pod "7f6c597e-a5bc-4dd8-8de9-30608f01bd82" (UID: "7f6c597e-a5bc-4dd8-8de9-30608f01bd82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.984157 4922 scope.go:117] "RemoveContainer" containerID="bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41" Sep 29 23:44:31 crc kubenswrapper[4922]: I0929 23:44:31.984168 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-kube-api-access-vsnq7" (OuterVolumeSpecName: "kube-api-access-vsnq7") pod "7f6c597e-a5bc-4dd8-8de9-30608f01bd82" (UID: "7f6c597e-a5bc-4dd8-8de9-30608f01bd82"). InnerVolumeSpecName "kube-api-access-vsnq7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.020989 4922 scope.go:117] "RemoveContainer" containerID="cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.028779 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f6c597e-a5bc-4dd8-8de9-30608f01bd82" (UID: "7f6c597e-a5bc-4dd8-8de9-30608f01bd82"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.042653 4922 scope.go:117] "RemoveContainer" containerID="d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c" Sep 29 23:44:32 crc kubenswrapper[4922]: E0929 23:44:32.043219 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c\": container with ID starting with d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c not found: ID does not exist" containerID="d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.043268 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c"} err="failed to get container status \"d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c\": rpc error: code = NotFound desc = could not find container \"d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c\": container with ID starting with d8d953370812858bf4adcdb106bfd98c92998fba7806f8eced9ed4abf9949c0c not found: ID does not exist" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.043299 4922 scope.go:117] "RemoveContainer" containerID="bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41" Sep 29 23:44:32 crc kubenswrapper[4922]: E0929 23:44:32.045282 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41\": container with ID starting with bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41 not found: ID does not exist" containerID="bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.045304 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41"} err="failed to get container status \"bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41\": rpc error: code = NotFound desc = could not find container \"bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41\": container with ID starting with bd1ffa87e99b0e2dffd93d5d4109c9ee9c040f2d4c8aa385302360b57ea59b41 not found: ID does not exist" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.045318 4922 scope.go:117] "RemoveContainer" containerID="cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913" Sep 29 23:44:32 crc kubenswrapper[4922]: E0929 23:44:32.045734 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913\": container with ID starting with cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913 not found: ID does not exist" containerID="cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.045778 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913"} err="failed to get container status \"cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913\": rpc error: code = NotFound desc = could not find container \"cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913\": container with ID starting with cb815646f60ca988c66a01de0d729aab4f4ffdad84382329e2c4db4db2cb8913 not found: ID does not exist" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.060671 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.060707 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsnq7\" (UniqueName: \"kubernetes.io/projected/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-kube-api-access-vsnq7\") on node \"crc\" DevicePath \"\"" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.060723 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f6c597e-a5bc-4dd8-8de9-30608f01bd82-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.298308 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mldwz"] Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.311130 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mldwz"] Sep 29 23:44:32 crc kubenswrapper[4922]: I0929 23:44:32.435202 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" path="/var/lib/kubelet/pods/7f6c597e-a5bc-4dd8-8de9-30608f01bd82/volumes" Sep 29 23:44:36 crc kubenswrapper[4922]: I0929 23:44:36.425840 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:44:36 crc kubenswrapper[4922]: E0929 23:44:36.426738 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:44:49 crc kubenswrapper[4922]: I0929 23:44:49.422410 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:44:49 crc kubenswrapper[4922]: E0929 23:44:49.425448 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.155495 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq"] Sep 29 23:45:00 crc kubenswrapper[4922]: E0929 23:45:00.156240 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="extract-content" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.156251 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="extract-content" Sep 29 23:45:00 crc kubenswrapper[4922]: E0929 23:45:00.156266 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="registry-server" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.156272 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="registry-server" Sep 29 23:45:00 crc kubenswrapper[4922]: E0929 23:45:00.156282 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="extract-utilities" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.156288 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="extract-utilities" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.156434 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f6c597e-a5bc-4dd8-8de9-30608f01bd82" containerName="registry-server" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.156907 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.159668 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.160588 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.167799 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq"] Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.263686 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/769ecc0b-2d3a-433d-b206-55953f8a6169-secret-volume\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.263797 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/769ecc0b-2d3a-433d-b206-55953f8a6169-config-volume\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.263846 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n667g\" (UniqueName: \"kubernetes.io/projected/769ecc0b-2d3a-433d-b206-55953f8a6169-kube-api-access-n667g\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.365130 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/769ecc0b-2d3a-433d-b206-55953f8a6169-secret-volume\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.365250 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/769ecc0b-2d3a-433d-b206-55953f8a6169-config-volume\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.365301 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n667g\" (UniqueName: \"kubernetes.io/projected/769ecc0b-2d3a-433d-b206-55953f8a6169-kube-api-access-n667g\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.367081 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/769ecc0b-2d3a-433d-b206-55953f8a6169-config-volume\") pod 
\"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.376196 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/769ecc0b-2d3a-433d-b206-55953f8a6169-secret-volume\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.396614 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n667g\" (UniqueName: \"kubernetes.io/projected/769ecc0b-2d3a-433d-b206-55953f8a6169-kube-api-access-n667g\") pod \"collect-profiles-29319825-b5njq\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.488242 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:00 crc kubenswrapper[4922]: I0929 23:45:00.964779 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq"] Sep 29 23:45:01 crc kubenswrapper[4922]: I0929 23:45:01.220155 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" event={"ID":"769ecc0b-2d3a-433d-b206-55953f8a6169","Type":"ContainerStarted","Data":"4076f5294483d8b1605d5bb09fac961a9b4d0ad00df19af05e9a7d86edef49b4"} Sep 29 23:45:01 crc kubenswrapper[4922]: I0929 23:45:01.220538 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" event={"ID":"769ecc0b-2d3a-433d-b206-55953f8a6169","Type":"ContainerStarted","Data":"4c1d9ec3f01c5ed3d31726c1ed645973808e05b91f86f2761d53f697b21865a6"} Sep 29 23:45:01 crc kubenswrapper[4922]: I0929 23:45:01.248869 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" podStartSLOduration=1.248849717 podStartE2EDuration="1.248849717s" podCreationTimestamp="2025-09-29 23:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:45:01.247300089 +0000 UTC m=+4705.557588942" watchObservedRunningTime="2025-09-29 23:45:01.248849717 +0000 UTC m=+4705.559138540" Sep 29 23:45:02 crc kubenswrapper[4922]: I0929 23:45:02.234600 4922 generic.go:334] "Generic (PLEG): container finished" podID="769ecc0b-2d3a-433d-b206-55953f8a6169" containerID="4076f5294483d8b1605d5bb09fac961a9b4d0ad00df19af05e9a7d86edef49b4" exitCode=0 Sep 29 23:45:02 crc kubenswrapper[4922]: I0929 23:45:02.234663 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" event={"ID":"769ecc0b-2d3a-433d-b206-55953f8a6169","Type":"ContainerDied","Data":"4076f5294483d8b1605d5bb09fac961a9b4d0ad00df19af05e9a7d86edef49b4"} Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.422909 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:45:03 crc kubenswrapper[4922]: E0929 23:45:03.424156 4922 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.599224 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.626168 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/769ecc0b-2d3a-433d-b206-55953f8a6169-config-volume\") pod \"769ecc0b-2d3a-433d-b206-55953f8a6169\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.626254 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n667g\" (UniqueName: \"kubernetes.io/projected/769ecc0b-2d3a-433d-b206-55953f8a6169-kube-api-access-n667g\") pod \"769ecc0b-2d3a-433d-b206-55953f8a6169\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.626502 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/769ecc0b-2d3a-433d-b206-55953f8a6169-secret-volume\") pod \"769ecc0b-2d3a-433d-b206-55953f8a6169\" (UID: \"769ecc0b-2d3a-433d-b206-55953f8a6169\") " Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.627347 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/769ecc0b-2d3a-433d-b206-55953f8a6169-config-volume" (OuterVolumeSpecName: "config-volume") pod "769ecc0b-2d3a-433d-b206-55953f8a6169" (UID: "769ecc0b-2d3a-433d-b206-55953f8a6169"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.656510 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/769ecc0b-2d3a-433d-b206-55953f8a6169-kube-api-access-n667g" (OuterVolumeSpecName: "kube-api-access-n667g") pod "769ecc0b-2d3a-433d-b206-55953f8a6169" (UID: "769ecc0b-2d3a-433d-b206-55953f8a6169"). InnerVolumeSpecName "kube-api-access-n667g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.657051 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769ecc0b-2d3a-433d-b206-55953f8a6169-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "769ecc0b-2d3a-433d-b206-55953f8a6169" (UID: "769ecc0b-2d3a-433d-b206-55953f8a6169"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.728563 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/769ecc0b-2d3a-433d-b206-55953f8a6169-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.728902 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/769ecc0b-2d3a-433d-b206-55953f8a6169-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 23:45:03 crc kubenswrapper[4922]: I0929 23:45:03.729093 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n667g\" (UniqueName: \"kubernetes.io/projected/769ecc0b-2d3a-433d-b206-55953f8a6169-kube-api-access-n667g\") on node \"crc\" DevicePath \"\"" Sep 29 23:45:04 crc kubenswrapper[4922]: I0929 23:45:04.250742 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" event={"ID":"769ecc0b-2d3a-433d-b206-55953f8a6169","Type":"ContainerDied","Data":"4c1d9ec3f01c5ed3d31726c1ed645973808e05b91f86f2761d53f697b21865a6"} Sep 29 23:45:04 crc kubenswrapper[4922]: I0929 23:45:04.251100 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c1d9ec3f01c5ed3d31726c1ed645973808e05b91f86f2761d53f697b21865a6" Sep 29 23:45:04 crc kubenswrapper[4922]: I0929 23:45:04.250821 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq" Sep 29 23:45:04 crc kubenswrapper[4922]: I0929 23:45:04.322268 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h"] Sep 29 23:45:04 crc kubenswrapper[4922]: I0929 23:45:04.330079 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319780-vld8h"] Sep 29 23:45:04 crc kubenswrapper[4922]: I0929 23:45:04.432319 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a3a0308-d028-4931-a959-c68dd8b2db37" path="/var/lib/kubelet/pods/1a3a0308-d028-4931-a959-c68dd8b2db37/volumes" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.852902 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jkgd8"] Sep 29 23:45:12 crc kubenswrapper[4922]: E0929 23:45:12.853836 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="769ecc0b-2d3a-433d-b206-55953f8a6169" containerName="collect-profiles" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.853849 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="769ecc0b-2d3a-433d-b206-55953f8a6169" containerName="collect-profiles" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.854002 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="769ecc0b-2d3a-433d-b206-55953f8a6169" containerName="collect-profiles" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.854737 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.857018 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.857534 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.857793 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-vfz4m" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.857927 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.857944 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.868069 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jkgd8"] Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.979142 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.979415 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-config\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:12 crc kubenswrapper[4922]: I0929 23:45:12.979617 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srzwp\" (UniqueName: \"kubernetes.io/projected/73fa16a2-76e0-446d-8629-b6b57d7e794e-kube-api-access-srzwp\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.081144 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.081190 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-config\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.081239 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srzwp\" (UniqueName: \"kubernetes.io/projected/73fa16a2-76e0-446d-8629-b6b57d7e794e-kube-api-access-srzwp\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.082612 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-config\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.082658 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-dns-svc\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.117350 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srzwp\" (UniqueName: \"kubernetes.io/projected/73fa16a2-76e0-446d-8629-b6b57d7e794e-kube-api-access-srzwp\") pod \"dnsmasq-dns-5d7b5456f5-jkgd8\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.140729 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-gtmv9"] Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.141838 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.155520 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-gtmv9"] Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.180920 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.182298 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-config\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.182349 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd5dp\" (UniqueName: \"kubernetes.io/projected/aad64b0f-67ee-43d9-b0f1-fef345d70e74-kube-api-access-kd5dp\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.182377 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.283817 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-config\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.283876 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd5dp\" (UniqueName: \"kubernetes.io/projected/aad64b0f-67ee-43d9-b0f1-fef345d70e74-kube-api-access-kd5dp\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: 
\"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.283903 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.284642 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-dns-svc\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.285119 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-config\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.309298 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd5dp\" (UniqueName: \"kubernetes.io/projected/aad64b0f-67ee-43d9-b0f1-fef345d70e74-kube-api-access-kd5dp\") pod \"dnsmasq-dns-98ddfc8f-gtmv9\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.473693 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.702487 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jkgd8"] Sep 29 23:45:13 crc kubenswrapper[4922]: I0929 23:45:13.932575 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-gtmv9"] Sep 29 23:45:13 crc kubenswrapper[4922]: W0929 23:45:13.941845 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaad64b0f_67ee_43d9_b0f1_fef345d70e74.slice/crio-55731b27db39bd3a9218a67709ce3f736e645847ba594c1477cbc421d552894d WatchSource:0}: Error finding container 55731b27db39bd3a9218a67709ce3f736e645847ba594c1477cbc421d552894d: Status 404 returned error can't find the container with id 55731b27db39bd3a9218a67709ce3f736e645847ba594c1477cbc421d552894d Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.019430 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.020513 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.022440 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.023297 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.023412 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.023427 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.023990 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-vfsst" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.037546 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.097938 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.097985 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.098039 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.098067 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.098106 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.098142 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.098168 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kstb7\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-kube-api-access-kstb7\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.098217 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.098239 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.199605 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.199907 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.200018 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.200103 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kstb7\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-kube-api-access-kstb7\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.200210 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.200291 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.200406 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.200502 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.200642 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.201148 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.201207 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.202266 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.202562 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.205110 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.205130 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.205261 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.205292 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7a21c8b21186b9d22b51a9fcb6932734fc516100ae81b2302fcb7fb9005d609/globalmount\"" pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.214153 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.221678 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kstb7\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-kube-api-access-kstb7\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.243416 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.275076 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.276970 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.279735 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.279951 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.280064 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.281745 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-9prlm" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.282032 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.286379 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.303684 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72bcbb4c-7564-465c-95d7-b498d02d13a4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.303765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.304357 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.304650 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.304771 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72bcbb4c-7564-465c-95d7-b498d02d13a4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.304902 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc 
kubenswrapper[4922]: I0929 23:45:14.305031 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjgsw\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-kube-api-access-mjgsw\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.305210 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.305371 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.345178 4922 generic.go:334] "Generic (PLEG): container finished" podID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerID="9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181" exitCode=0 Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.345233 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" event={"ID":"73fa16a2-76e0-446d-8629-b6b57d7e794e","Type":"ContainerDied","Data":"9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181"} Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.345257 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" event={"ID":"73fa16a2-76e0-446d-8629-b6b57d7e794e","Type":"ContainerStarted","Data":"ff1d592dd2f68643d0116240b8582563371f1cc5b28f0f86a0993d032a39db31"} Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.346441 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.347636 4922 generic.go:334] "Generic (PLEG): container finished" podID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerID="7f8fff066c079691bd1b5490eae1ea6e6e39353226e56baf6df2be1951ae3b68" exitCode=0 Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.347666 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" event={"ID":"aad64b0f-67ee-43d9-b0f1-fef345d70e74","Type":"ContainerDied","Data":"7f8fff066c079691bd1b5490eae1ea6e6e39353226e56baf6df2be1951ae3b68"} Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.347837 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" event={"ID":"aad64b0f-67ee-43d9-b0f1-fef345d70e74","Type":"ContainerStarted","Data":"55731b27db39bd3a9218a67709ce3f736e645847ba594c1477cbc421d552894d"} Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.406893 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72bcbb4c-7564-465c-95d7-b498d02d13a4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.406934 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.406991 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407028 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407065 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72bcbb4c-7564-465c-95d7-b498d02d13a4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407179 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407209 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjgsw\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-kube-api-access-mjgsw\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407261 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407309 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407541 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.407701 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.409012 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.409126 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.411777 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72bcbb4c-7564-465c-95d7-b498d02d13a4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.414029 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72bcbb4c-7564-465c-95d7-b498d02d13a4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.417615 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.421643 4922 
csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.421680 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a7ab5f74d4bba979cda4a91f30253cacb1ced1c4859b21586f0117b4a915ff9b/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.435201 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjgsw\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-kube-api-access-mjgsw\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.477969 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.596040 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.808551 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:45:14 crc kubenswrapper[4922]: W0929 23:45:14.812149 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0eac0f0c_ea78_49bf_8760_3ef8be59b5ed.slice/crio-14606a8fda878401abb5c43cfd5888719ec278f8844857d69821b2ed1b2478a5 WatchSource:0}: Error finding container 14606a8fda878401abb5c43cfd5888719ec278f8844857d69821b2ed1b2478a5: Status 404 returned error can't find the container with id 14606a8fda878401abb5c43cfd5888719ec278f8844857d69821b2ed1b2478a5 Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.836061 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:45:14 crc kubenswrapper[4922]: W0929 23:45:14.842164 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72bcbb4c_7564_465c_95d7_b498d02d13a4.slice/crio-e179e0b53016621e9d84a0890a35f06fafcbab0714b0bcf95f1ed45ddbc17d36 WatchSource:0}: Error finding container e179e0b53016621e9d84a0890a35f06fafcbab0714b0bcf95f1ed45ddbc17d36: Status 404 returned error can't find the container with id e179e0b53016621e9d84a0890a35f06fafcbab0714b0bcf95f1ed45ddbc17d36 Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.954251 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.955861 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.959262 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.960128 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.960519 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-r7qnb" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.961481 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.962050 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.971202 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 29 23:45:14 crc kubenswrapper[4922]: I0929 23:45:14.975351 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.117143 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0273df65-3375-4e15-b8ca-0279dc20353f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.117705 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.117797 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-secrets\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.117830 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-kolla-config\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.117898 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wr8q\" (UniqueName: \"kubernetes.io/projected/0273df65-3375-4e15-b8ca-0279dc20353f-kube-api-access-5wr8q\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.118098 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\") pod \"openstack-galera-0\" (UID: 
\"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.118170 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.118248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-config-data-default\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.118346 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.219761 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.219826 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.219858 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-config-data-default\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.219911 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.219951 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0273df65-3375-4e15-b8ca-0279dc20353f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.219980 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 
29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.220008 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-secrets\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.220032 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-kolla-config\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.220056 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wr8q\" (UniqueName: \"kubernetes.io/projected/0273df65-3375-4e15-b8ca-0279dc20353f-kube-api-access-5wr8q\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.221156 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0273df65-3375-4e15-b8ca-0279dc20353f-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.222138 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-kolla-config\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.222180 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-config-data-default\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.222411 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0273df65-3375-4e15-b8ca-0279dc20353f-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.226451 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.226496 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e988adaecbd42226cfc20a49fdb5e31771963f7db7d10689a27b0f5794c7d707/globalmount\"" pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.257338 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.257496 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wr8q\" (UniqueName: \"kubernetes.io/projected/0273df65-3375-4e15-b8ca-0279dc20353f-kube-api-access-5wr8q\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.258009 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.259986 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/0273df65-3375-4e15-b8ca-0279dc20353f-secrets\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.348551 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e978f2b8-a4f9-4f29-881f-5c72faf85cf5\") pod \"openstack-galera-0\" (UID: \"0273df65-3375-4e15-b8ca-0279dc20353f\") " pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.387012 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed","Type":"ContainerStarted","Data":"14606a8fda878401abb5c43cfd5888719ec278f8844857d69821b2ed1b2478a5"} Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.389456 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72bcbb4c-7564-465c-95d7-b498d02d13a4","Type":"ContainerStarted","Data":"e179e0b53016621e9d84a0890a35f06fafcbab0714b0bcf95f1ed45ddbc17d36"} Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.403926 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" event={"ID":"aad64b0f-67ee-43d9-b0f1-fef345d70e74","Type":"ContainerStarted","Data":"0f8796815dd6c4e833803702720ffcfd2e0a3df4c59b910ccd8073d4f27b316b"} Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.404970 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:15 crc 
kubenswrapper[4922]: I0929 23:45:15.412083 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" event={"ID":"73fa16a2-76e0-446d-8629-b6b57d7e794e","Type":"ContainerStarted","Data":"5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f"} Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.412326 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.422350 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:45:15 crc kubenswrapper[4922]: E0929 23:45:15.427231 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.430853 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" podStartSLOduration=2.430838215 podStartE2EDuration="2.430838215s" podCreationTimestamp="2025-09-29 23:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:45:15.429929443 +0000 UTC m=+4719.740218256" watchObservedRunningTime="2025-09-29 23:45:15.430838215 +0000 UTC m=+4719.741127028" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.454595 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.458421 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.461162 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-d4dpv" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.466717 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" podStartSLOduration=3.466698858 podStartE2EDuration="3.466698858s" podCreationTimestamp="2025-09-29 23:45:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:45:15.461330616 +0000 UTC m=+4719.771619429" watchObservedRunningTime="2025-09-29 23:45:15.466698858 +0000 UTC m=+4719.776987661" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.466750 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.493623 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.608451 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.629613 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-config-data\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.629716 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28bqj\" (UniqueName: \"kubernetes.io/projected/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-kube-api-access-28bqj\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.629738 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-kolla-config\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.732042 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28bqj\" (UniqueName: \"kubernetes.io/projected/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-kube-api-access-28bqj\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.732112 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-kolla-config\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.732220 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-config-data\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.733229 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-config-data\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.734639 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-kolla-config\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.761444 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28bqj\" (UniqueName: \"kubernetes.io/projected/cdd5b23e-56a0-4711-bd79-061dc4b72cb3-kube-api-access-28bqj\") pod \"memcached-0\" (UID: \"cdd5b23e-56a0-4711-bd79-061dc4b72cb3\") " pod="openstack/memcached-0" Sep 29 23:45:15 crc kubenswrapper[4922]: I0929 23:45:15.799730 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.120484 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 23:45:16 crc kubenswrapper[4922]: W0929 23:45:16.133007 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0273df65_3375_4e15_b8ca_0279dc20353f.slice/crio-4c22c7bdfe3b37149c1a92d7f7b163b15afa444b862dc9b09cae1dc755377587 WatchSource:0}: Error finding container 4c22c7bdfe3b37149c1a92d7f7b163b15afa444b862dc9b09cae1dc755377587: Status 404 returned error can't find the container with id 4c22c7bdfe3b37149c1a92d7f7b163b15afa444b862dc9b09cae1dc755377587 Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.217253 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 23:45:16 crc kubenswrapper[4922]: W0929 23:45:16.222229 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcdd5b23e_56a0_4711_bd79_061dc4b72cb3.slice/crio-8d198e8ed9b63176165f75924b0533c1448b9b462e5a3127d40cf3b51f7141b2 WatchSource:0}: Error finding container 8d198e8ed9b63176165f75924b0533c1448b9b462e5a3127d40cf3b51f7141b2: Status 404 returned error can't find the container with id 8d198e8ed9b63176165f75924b0533c1448b9b462e5a3127d40cf3b51f7141b2 Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.415068 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.417136 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.426482 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.426876 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.426986 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-j87fx" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.427132 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.445256 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"cdd5b23e-56a0-4711-bd79-061dc4b72cb3","Type":"ContainerStarted","Data":"48f306da0547380d726846de567d032c39f5dc74102e08761f2c0d36bf5239b1"} Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.445324 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.445346 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"cdd5b23e-56a0-4711-bd79-061dc4b72cb3","Type":"ContainerStarted","Data":"8d198e8ed9b63176165f75924b0533c1448b9b462e5a3127d40cf3b51f7141b2"} Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.445365 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed","Type":"ContainerStarted","Data":"f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb"} Sep 29 23:45:16 
crc kubenswrapper[4922]: I0929 23:45:16.445385 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72bcbb4c-7564-465c-95d7-b498d02d13a4","Type":"ContainerStarted","Data":"47c614d9cd11959a3be065ac0127f94118344d7a3edcf9f4e539253de68e1711"} Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.445429 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0273df65-3375-4e15-b8ca-0279dc20353f","Type":"ContainerStarted","Data":"ce92f7cfa07376608cd248af3a73dbf4a03d7e4347d6153b294743a0903daab6"} Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.445450 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0273df65-3375-4e15-b8ca-0279dc20353f","Type":"ContainerStarted","Data":"4c22c7bdfe3b37149c1a92d7f7b163b15afa444b862dc9b09cae1dc755377587"} Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.500523 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.530949 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.5309323209999999 podStartE2EDuration="1.530932321s" podCreationTimestamp="2025-09-29 23:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:45:16.530854869 +0000 UTC m=+4720.841143702" watchObservedRunningTime="2025-09-29 23:45:16.530932321 +0000 UTC m=+4720.841221134" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.545337 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.545466 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/77609099-dbc7-4148-b163-6013051afaba-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.545558 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.545626 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.545668 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.545846 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.546074 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.546155 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lx8c\" (UniqueName: \"kubernetes.io/projected/77609099-dbc7-4148-b163-6013051afaba-kube-api-access-9lx8c\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.546278 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649527 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649571 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649613 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lx8c\" (UniqueName: \"kubernetes.io/projected/77609099-dbc7-4148-b163-6013051afaba-kube-api-access-9lx8c\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649676 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649715 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649738 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/77609099-dbc7-4148-b163-6013051afaba-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649764 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649785 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.649803 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.650468 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/77609099-dbc7-4148-b163-6013051afaba-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.651645 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.652355 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.653068 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77609099-dbc7-4148-b163-6013051afaba-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.658712 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 23:45:16 crc kubenswrapper[4922]: I0929 23:45:16.658753 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/999ddfe33ea1fec991db050544f4ad22502ae163bc21c64be1518f25ff1c8ee6/globalmount\"" pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:17 crc kubenswrapper[4922]: I0929 23:45:17.050169 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:17 crc kubenswrapper[4922]: I0929 23:45:17.050339 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:17 crc kubenswrapper[4922]: I0929 23:45:17.050621 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lx8c\" (UniqueName: \"kubernetes.io/projected/77609099-dbc7-4148-b163-6013051afaba-kube-api-access-9lx8c\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:17 crc kubenswrapper[4922]: I0929 23:45:17.050815 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77609099-dbc7-4148-b163-6013051afaba-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:17 crc kubenswrapper[4922]: I0929 23:45:17.097831 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ebc12c4d-ffae-4690-870b-505b8b7f89ea\") pod \"openstack-cell1-galera-0\" (UID: \"77609099-dbc7-4148-b163-6013051afaba\") " pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:17 crc kubenswrapper[4922]: I0929 23:45:17.341417 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:17 crc kubenswrapper[4922]: I0929 23:45:17.944633 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 23:45:18 crc kubenswrapper[4922]: I0929 23:45:18.506023 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"77609099-dbc7-4148-b163-6013051afaba","Type":"ContainerStarted","Data":"023f5eab761ea497f1a3b369ba3aaf96a67eb69de0df32501c6fc2c08dc5dd74"} Sep 29 23:45:18 crc kubenswrapper[4922]: I0929 23:45:18.506545 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"77609099-dbc7-4148-b163-6013051afaba","Type":"ContainerStarted","Data":"7b28eda48cabcb4c5b5ef074f2d4ee1c81e4ba2fbe13a8ee9d129427e7ce58ac"} Sep 29 23:45:21 crc kubenswrapper[4922]: I0929 23:45:21.544337 4922 generic.go:334] "Generic (PLEG): container finished" podID="0273df65-3375-4e15-b8ca-0279dc20353f" containerID="ce92f7cfa07376608cd248af3a73dbf4a03d7e4347d6153b294743a0903daab6" exitCode=0 Sep 29 23:45:21 crc kubenswrapper[4922]: I0929 23:45:21.544434 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0273df65-3375-4e15-b8ca-0279dc20353f","Type":"ContainerDied","Data":"ce92f7cfa07376608cd248af3a73dbf4a03d7e4347d6153b294743a0903daab6"} Sep 29 23:45:21 crc kubenswrapper[4922]: I0929 23:45:21.547774 4922 generic.go:334] "Generic (PLEG): container finished" podID="77609099-dbc7-4148-b163-6013051afaba" containerID="023f5eab761ea497f1a3b369ba3aaf96a67eb69de0df32501c6fc2c08dc5dd74" exitCode=0 Sep 29 23:45:21 crc kubenswrapper[4922]: I0929 23:45:21.547819 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"77609099-dbc7-4148-b163-6013051afaba","Type":"ContainerDied","Data":"023f5eab761ea497f1a3b369ba3aaf96a67eb69de0df32501c6fc2c08dc5dd74"} Sep 29 23:45:22 crc kubenswrapper[4922]: I0929 23:45:22.559120 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"77609099-dbc7-4148-b163-6013051afaba","Type":"ContainerStarted","Data":"c151f6a716492e5f432000e59ddeb2802d2809458bbd15074aa1b266a2a540df"} Sep 29 23:45:22 crc kubenswrapper[4922]: I0929 23:45:22.561595 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"0273df65-3375-4e15-b8ca-0279dc20353f","Type":"ContainerStarted","Data":"977a4aa29b383cbaa08c8e4ef72b0ad532b47ddf1628fbd07f040e120ed8acc7"} Sep 29 23:45:22 crc kubenswrapper[4922]: I0929 23:45:22.587881 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.587865689 podStartE2EDuration="7.587865689s" podCreationTimestamp="2025-09-29 23:45:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:45:22.586319701 +0000 UTC m=+4726.896608514" watchObservedRunningTime="2025-09-29 23:45:22.587865689 +0000 UTC m=+4726.898154502" Sep 29 23:45:22 crc kubenswrapper[4922]: I0929 23:45:22.617249 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.617227812 podStartE2EDuration="9.617227812s" podCreationTimestamp="2025-09-29 23:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-29 23:45:22.611818239 +0000 UTC m=+4726.922107062" watchObservedRunningTime="2025-09-29 23:45:22.617227812 +0000 UTC m=+4726.927516625" Sep 29 23:45:23 crc kubenswrapper[4922]: I0929 23:45:23.183633 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:23 crc kubenswrapper[4922]: I0929 23:45:23.474636 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:45:23 crc kubenswrapper[4922]: I0929 23:45:23.541692 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jkgd8"] Sep 29 23:45:23 crc kubenswrapper[4922]: I0929 23:45:23.570777 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" podUID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerName="dnsmasq-dns" containerID="cri-o://5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f" gracePeriod=10 Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.011417 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.093950 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srzwp\" (UniqueName: \"kubernetes.io/projected/73fa16a2-76e0-446d-8629-b6b57d7e794e-kube-api-access-srzwp\") pod \"73fa16a2-76e0-446d-8629-b6b57d7e794e\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.094042 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-config\") pod \"73fa16a2-76e0-446d-8629-b6b57d7e794e\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.094212 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-dns-svc\") pod \"73fa16a2-76e0-446d-8629-b6b57d7e794e\" (UID: \"73fa16a2-76e0-446d-8629-b6b57d7e794e\") " Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.099753 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73fa16a2-76e0-446d-8629-b6b57d7e794e-kube-api-access-srzwp" (OuterVolumeSpecName: "kube-api-access-srzwp") pod "73fa16a2-76e0-446d-8629-b6b57d7e794e" (UID: "73fa16a2-76e0-446d-8629-b6b57d7e794e"). InnerVolumeSpecName "kube-api-access-srzwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.127793 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "73fa16a2-76e0-446d-8629-b6b57d7e794e" (UID: "73fa16a2-76e0-446d-8629-b6b57d7e794e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.136461 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-config" (OuterVolumeSpecName: "config") pod "73fa16a2-76e0-446d-8629-b6b57d7e794e" (UID: "73fa16a2-76e0-446d-8629-b6b57d7e794e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.195929 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srzwp\" (UniqueName: \"kubernetes.io/projected/73fa16a2-76e0-446d-8629-b6b57d7e794e-kube-api-access-srzwp\") on node \"crc\" DevicePath \"\"" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.195979 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.195998 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/73fa16a2-76e0-446d-8629-b6b57d7e794e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.582382 4922 generic.go:334] "Generic (PLEG): container finished" podID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerID="5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f" exitCode=0 Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.582539 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" event={"ID":"73fa16a2-76e0-446d-8629-b6b57d7e794e","Type":"ContainerDied","Data":"5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f"} Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.582976 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" event={"ID":"73fa16a2-76e0-446d-8629-b6b57d7e794e","Type":"ContainerDied","Data":"ff1d592dd2f68643d0116240b8582563371f1cc5b28f0f86a0993d032a39db31"} Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.583017 4922 scope.go:117] "RemoveContainer" containerID="5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.582637 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d7b5456f5-jkgd8" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.618337 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jkgd8"] Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.618783 4922 scope.go:117] "RemoveContainer" containerID="9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.624685 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d7b5456f5-jkgd8"] Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.642260 4922 scope.go:117] "RemoveContainer" containerID="5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f" Sep 29 23:45:24 crc kubenswrapper[4922]: E0929 23:45:24.642837 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f\": container with ID starting with 5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f not found: ID does not exist" containerID="5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.642875 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f"} err="failed to get container status \"5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f\": rpc error: code = NotFound desc = could not find container \"5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f\": container with ID starting with 5fc2adfdf094521e0f123d93f625c4e6c301284827372ec71f16b4706c62ee9f not found: ID does not exist" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.642900 4922 scope.go:117] "RemoveContainer" containerID="9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181" Sep 29 23:45:24 crc kubenswrapper[4922]: E0929 23:45:24.643443 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181\": container with ID starting with 9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181 not found: ID does not exist" containerID="9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181" Sep 29 23:45:24 crc kubenswrapper[4922]: I0929 23:45:24.643486 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181"} err="failed to get container status \"9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181\": rpc error: code = NotFound desc = could not find container \"9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181\": container with ID starting with 9dc6439abb016afee92b0a1e4c4a2d07510f1912727c49366b9377afb8840181 not found: ID does not exist" Sep 29 23:45:25 crc kubenswrapper[4922]: I0929 23:45:25.609427 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 23:45:25 crc kubenswrapper[4922]: I0929 23:45:25.610728 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 23:45:25 crc kubenswrapper[4922]: I0929 23:45:25.801743 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/memcached-0" Sep 29 23:45:26 crc kubenswrapper[4922]: I0929 23:45:26.440157 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73fa16a2-76e0-446d-8629-b6b57d7e794e" path="/var/lib/kubelet/pods/73fa16a2-76e0-446d-8629-b6b57d7e794e/volumes" Sep 29 23:45:27 crc kubenswrapper[4922]: I0929 23:45:27.343057 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:27 crc kubenswrapper[4922]: I0929 23:45:27.345113 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:28 crc kubenswrapper[4922]: I0929 23:45:28.266976 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 23:45:28 crc kubenswrapper[4922]: I0929 23:45:28.334454 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 23:45:28 crc kubenswrapper[4922]: I0929 23:45:28.424896 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:45:28 crc kubenswrapper[4922]: E0929 23:45:28.425796 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:45:29 crc kubenswrapper[4922]: I0929 23:45:29.428367 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:29 crc kubenswrapper[4922]: I0929 23:45:29.994040 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 23:45:43 crc kubenswrapper[4922]: I0929 23:45:43.421858 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:45:43 crc kubenswrapper[4922]: E0929 23:45:43.423053 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:45:46 crc kubenswrapper[4922]: I0929 23:45:46.803985 4922 generic.go:334] "Generic (PLEG): container finished" podID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerID="f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb" exitCode=0 Sep 29 23:45:46 crc kubenswrapper[4922]: I0929 23:45:46.804121 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed","Type":"ContainerDied","Data":"f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb"} Sep 29 23:45:46 crc kubenswrapper[4922]: I0929 23:45:46.807523 4922 generic.go:334] "Generic (PLEG): container finished" podID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerID="47c614d9cd11959a3be065ac0127f94118344d7a3edcf9f4e539253de68e1711" exitCode=0 Sep 29 23:45:46 crc kubenswrapper[4922]: I0929 23:45:46.807565 
4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72bcbb4c-7564-465c-95d7-b498d02d13a4","Type":"ContainerDied","Data":"47c614d9cd11959a3be065ac0127f94118344d7a3edcf9f4e539253de68e1711"} Sep 29 23:45:47 crc kubenswrapper[4922]: I0929 23:45:47.817872 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72bcbb4c-7564-465c-95d7-b498d02d13a4","Type":"ContainerStarted","Data":"8d7e80bb432889ce2080a297300e9c29abab8e27e813cdeafb5587952e167afb"} Sep 29 23:45:47 crc kubenswrapper[4922]: I0929 23:45:47.818114 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:45:47 crc kubenswrapper[4922]: I0929 23:45:47.821968 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed","Type":"ContainerStarted","Data":"2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89"} Sep 29 23:45:47 crc kubenswrapper[4922]: I0929 23:45:47.822210 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 23:45:47 crc kubenswrapper[4922]: I0929 23:45:47.864302 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.864266202 podStartE2EDuration="34.864266202s" podCreationTimestamp="2025-09-29 23:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:45:47.850977305 +0000 UTC m=+4752.161266108" watchObservedRunningTime="2025-09-29 23:45:47.864266202 +0000 UTC m=+4752.174555055" Sep 29 23:45:47 crc kubenswrapper[4922]: I0929 23:45:47.879747 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=35.879715782 podStartE2EDuration="35.879715782s" podCreationTimestamp="2025-09-29 23:45:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:45:47.869586893 +0000 UTC m=+4752.179875706" watchObservedRunningTime="2025-09-29 23:45:47.879715782 +0000 UTC m=+4752.190004635" Sep 29 23:45:51 crc kubenswrapper[4922]: I0929 23:45:51.680823 4922 scope.go:117] "RemoveContainer" containerID="2c17b313cc66290d86db66ac133a7bbc85795c0b5bfd4801035cc184ca0a7e20" Sep 29 23:45:56 crc kubenswrapper[4922]: I0929 23:45:56.425184 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:45:56 crc kubenswrapper[4922]: E0929 23:45:56.425969 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:46:04 crc kubenswrapper[4922]: I0929 23:46:04.349735 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 23:46:04 crc kubenswrapper[4922]: I0929 23:46:04.600467 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:11 crc 
kubenswrapper[4922]: I0929 23:46:11.401061 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-7hjcz"] Sep 29 23:46:11 crc kubenswrapper[4922]: E0929 23:46:11.402939 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerName="init" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.403082 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerName="init" Sep 29 23:46:11 crc kubenswrapper[4922]: E0929 23:46:11.403174 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerName="dnsmasq-dns" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.403258 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerName="dnsmasq-dns" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.403593 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="73fa16a2-76e0-446d-8629-b6b57d7e794e" containerName="dnsmasq-dns" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.404736 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.417584 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-7hjcz"] Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.424136 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:46:11 crc kubenswrapper[4922]: E0929 23:46:11.424599 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.438241 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.438317 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-config\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.438475 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q75jk\" (UniqueName: \"kubernetes.io/projected/714fe763-a262-451b-87c5-a2bf6759aee8-kube-api-access-q75jk\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.540091 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-config\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.540200 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q75jk\" (UniqueName: \"kubernetes.io/projected/714fe763-a262-451b-87c5-a2bf6759aee8-kube-api-access-q75jk\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.540270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.541767 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-dns-svc\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.541806 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-config\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.564069 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q75jk\" (UniqueName: \"kubernetes.io/projected/714fe763-a262-451b-87c5-a2bf6759aee8-kube-api-access-q75jk\") pod \"dnsmasq-dns-5b7946d7b9-7hjcz\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:11 crc kubenswrapper[4922]: I0929 23:46:11.728268 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:12 crc kubenswrapper[4922]: I0929 23:46:12.043503 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:46:12 crc kubenswrapper[4922]: I0929 23:46:12.163950 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-7hjcz"] Sep 29 23:46:12 crc kubenswrapper[4922]: I0929 23:46:12.778570 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:46:13 crc kubenswrapper[4922]: I0929 23:46:13.040548 4922 generic.go:334] "Generic (PLEG): container finished" podID="714fe763-a262-451b-87c5-a2bf6759aee8" containerID="b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b" exitCode=0 Sep 29 23:46:13 crc kubenswrapper[4922]: I0929 23:46:13.040820 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" event={"ID":"714fe763-a262-451b-87c5-a2bf6759aee8","Type":"ContainerDied","Data":"b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b"} Sep 29 23:46:13 crc kubenswrapper[4922]: I0929 23:46:13.040847 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" event={"ID":"714fe763-a262-451b-87c5-a2bf6759aee8","Type":"ContainerStarted","Data":"e1fe204ea3b6fbc85cce9e06289652928ab60b13b6024341a0be245ebf668f30"} Sep 29 23:46:14 crc kubenswrapper[4922]: I0929 23:46:14.051138 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" event={"ID":"714fe763-a262-451b-87c5-a2bf6759aee8","Type":"ContainerStarted","Data":"57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75"} Sep 29 23:46:14 crc kubenswrapper[4922]: I0929 23:46:14.051478 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:14 crc kubenswrapper[4922]: I0929 23:46:14.074449 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" podStartSLOduration=3.074366188 podStartE2EDuration="3.074366188s" podCreationTimestamp="2025-09-29 23:46:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:46:14.070167854 +0000 UTC m=+4778.380456698" watchObservedRunningTime="2025-09-29 23:46:14.074366188 +0000 UTC m=+4778.384655041" Sep 29 23:46:14 crc kubenswrapper[4922]: I0929 23:46:14.089564 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerName="rabbitmq" containerID="cri-o://2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89" gracePeriod=604798 Sep 29 23:46:14 crc kubenswrapper[4922]: I0929 23:46:14.347829 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.235:5672: connect: connection refused" Sep 29 23:46:14 crc kubenswrapper[4922]: I0929 23:46:14.597580 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.236:5672: connect: connection refused" Sep 29 23:46:14 crc kubenswrapper[4922]: I0929 23:46:14.611941 4922 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerName="rabbitmq" containerID="cri-o://8d7e80bb432889ce2080a297300e9c29abab8e27e813cdeafb5587952e167afb" gracePeriod=604799 Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.686984 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.779869 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-plugins\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.779936 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-confd\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780008 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-erlang-cookie\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780128 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-server-conf\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780285 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780320 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-pod-info\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780352 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kstb7\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-kube-api-access-kstb7\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780424 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-erlang-cookie-secret\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780462 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-plugins-conf\") pod \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\" (UID: \"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed\") " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780307 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780832 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.780858 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.781773 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.788542 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-kube-api-access-kstb7" (OuterVolumeSpecName: "kube-api-access-kstb7") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "kube-api-access-kstb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.795021 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.798596 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-pod-info" (OuterVolumeSpecName: "pod-info") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.819038 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c" (OuterVolumeSpecName: "persistence") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). 
InnerVolumeSpecName "pvc-fbb59102-c63a-4622-98f9-272834a7042c". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.822988 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-server-conf" (OuterVolumeSpecName: "server-conf") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.882023 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.882054 4922 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.882081 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") on node \"crc\" " Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.882092 4922 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.882101 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kstb7\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-kube-api-access-kstb7\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.882111 4922 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.882120 4922 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.900365 4922 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.900460 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" (UID: "0eac0f0c-ea78-49bf-8760-3ef8be59b5ed"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.900552 4922 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-fbb59102-c63a-4622-98f9-272834a7042c" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c") on node "crc" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.983229 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:20 crc kubenswrapper[4922]: I0929 23:46:20.983257 4922 reconciler_common.go:293] "Volume detached for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.117527 4922 generic.go:334] "Generic (PLEG): container finished" podID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerID="8d7e80bb432889ce2080a297300e9c29abab8e27e813cdeafb5587952e167afb" exitCode=0 Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.117596 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72bcbb4c-7564-465c-95d7-b498d02d13a4","Type":"ContainerDied","Data":"8d7e80bb432889ce2080a297300e9c29abab8e27e813cdeafb5587952e167afb"} Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.119271 4922 generic.go:334] "Generic (PLEG): container finished" podID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerID="2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89" exitCode=0 Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.119338 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.119322 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed","Type":"ContainerDied","Data":"2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89"} Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.119444 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0eac0f0c-ea78-49bf-8760-3ef8be59b5ed","Type":"ContainerDied","Data":"14606a8fda878401abb5c43cfd5888719ec278f8844857d69821b2ed1b2478a5"} Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.119489 4922 scope.go:117] "RemoveContainer" containerID="2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.155164 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.161636 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.179980 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:46:21 crc kubenswrapper[4922]: E0929 23:46:21.180436 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerName="rabbitmq" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.180466 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerName="rabbitmq" Sep 29 23:46:21 crc kubenswrapper[4922]: E0929 23:46:21.180514 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerName="setup-container" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.180529 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerName="setup-container" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.181161 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" containerName="rabbitmq" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.182720 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.185793 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.185847 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.185847 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.185920 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.187723 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-vfsst" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.200655 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.259954 4922 scope.go:117] "RemoveContainer" containerID="f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.264207 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.274857 4922 scope.go:117] "RemoveContainer" containerID="2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89" Sep 29 23:46:21 crc kubenswrapper[4922]: E0929 23:46:21.275190 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89\": container with ID starting with 2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89 not found: ID does not exist" containerID="2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.275215 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89"} err="failed to get container status \"2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89\": rpc error: code = NotFound desc = could not find container \"2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89\": container with ID starting with 2024425652b87e44b75aee1327e95716c592d44ff314cb9214b6e08fc8d76d89 not found: ID does not exist" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.275235 4922 scope.go:117] "RemoveContainer" containerID="f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb" Sep 29 23:46:21 crc kubenswrapper[4922]: E0929 23:46:21.275706 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb\": container with ID starting with f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb not found: ID does not exist" containerID="f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.275726 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb"} err="failed to get 
container status \"f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb\": rpc error: code = NotFound desc = could not find container \"f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb\": container with ID starting with f6ab9a9be1bd593f49d8ac2702175f6441fd9e4468f981be15eabf6cfd5b7fcb not found: ID does not exist" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388369 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-plugins-conf\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388435 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-confd\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388471 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-server-conf\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388516 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-plugins\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388616 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-erlang-cookie\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388734 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388769 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjgsw\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-kube-api-access-mjgsw\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388810 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72bcbb4c-7564-465c-95d7-b498d02d13a4-erlang-cookie-secret\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.388904 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: 
"72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.389235 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.389461 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.389517 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72bcbb4c-7564-465c-95d7-b498d02d13a4-pod-info\") pod \"72bcbb4c-7564-465c-95d7-b498d02d13a4\" (UID: \"72bcbb4c-7564-465c-95d7-b498d02d13a4\") " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390022 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390096 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390119 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/611a3899-4697-458f-8e48-6516d4b9e899-pod-info\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390321 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/611a3899-4697-458f-8e48-6516d4b9e899-server-conf\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390518 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390618 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/611a3899-4697-458f-8e48-6516d4b9e899-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390738 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/611a3899-4697-458f-8e48-6516d4b9e899-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.390833 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxvks\" (UniqueName: \"kubernetes.io/projected/611a3899-4697-458f-8e48-6516d4b9e899-kube-api-access-dxvks\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.391114 4922 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-plugins-conf\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.391180 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.391200 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.392325 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-kube-api-access-mjgsw" (OuterVolumeSpecName: "kube-api-access-mjgsw") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "kube-api-access-mjgsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.393154 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/72bcbb4c-7564-465c-95d7-b498d02d13a4-pod-info" (OuterVolumeSpecName: "pod-info") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.421144 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-server-conf" (OuterVolumeSpecName: "server-conf") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.454406 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72bcbb4c-7564-465c-95d7-b498d02d13a4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.470585 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08" (OuterVolumeSpecName: "persistence") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492127 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492197 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492220 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/611a3899-4697-458f-8e48-6516d4b9e899-pod-info\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492243 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/611a3899-4697-458f-8e48-6516d4b9e899-server-conf\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492265 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492287 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/611a3899-4697-458f-8e48-6516d4b9e899-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492311 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/611a3899-4697-458f-8e48-6516d4b9e899-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492324 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492340 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxvks\" (UniqueName: \"kubernetes.io/projected/611a3899-4697-458f-8e48-6516d4b9e899-kube-api-access-dxvks\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492407 4922 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/72bcbb4c-7564-465c-95d7-b498d02d13a4-pod-info\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492421 4922 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/72bcbb4c-7564-465c-95d7-b498d02d13a4-server-conf\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492446 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") on node \"crc\" " Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492456 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjgsw\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-kube-api-access-mjgsw\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.492467 4922 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/72bcbb4c-7564-465c-95d7-b498d02d13a4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.493857 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.494069 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.497597 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/611a3899-4697-458f-8e48-6516d4b9e899-pod-info\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.498470 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/611a3899-4697-458f-8e48-6516d4b9e899-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.499216 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/611a3899-4697-458f-8e48-6516d4b9e899-server-conf\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.501120 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/611a3899-4697-458f-8e48-6516d4b9e899-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.502415 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.502457 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d7a21c8b21186b9d22b51a9fcb6932734fc516100ae81b2302fcb7fb9005d609/globalmount\"" pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.502679 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/611a3899-4697-458f-8e48-6516d4b9e899-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.507536 4922 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.507682 4922 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08") on node "crc" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.521129 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxvks\" (UniqueName: \"kubernetes.io/projected/611a3899-4697-458f-8e48-6516d4b9e899-kube-api-access-dxvks\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.537121 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "72bcbb4c-7564-465c-95d7-b498d02d13a4" (UID: "72bcbb4c-7564-465c-95d7-b498d02d13a4"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.540255 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-fbb59102-c63a-4622-98f9-272834a7042c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-fbb59102-c63a-4622-98f9-272834a7042c\") pod \"rabbitmq-server-0\" (UID: \"611a3899-4697-458f-8e48-6516d4b9e899\") " pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.594074 4922 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/72bcbb4c-7564-465c-95d7-b498d02d13a4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.594113 4922 reconciler_common.go:293] "Volume detached for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.730684 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.804564 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.821128 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-gtmv9"] Sep 29 23:46:21 crc kubenswrapper[4922]: I0929 23:46:21.821512 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" podUID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerName="dnsmasq-dns" containerID="cri-o://0f8796815dd6c4e833803702720ffcfd2e0a3df4c59b910ccd8073d4f27b316b" gracePeriod=10 Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.160115 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"72bcbb4c-7564-465c-95d7-b498d02d13a4","Type":"ContainerDied","Data":"e179e0b53016621e9d84a0890a35f06fafcbab0714b0bcf95f1ed45ddbc17d36"} Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.160517 4922 scope.go:117] "RemoveContainer" containerID="8d7e80bb432889ce2080a297300e9c29abab8e27e813cdeafb5587952e167afb" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.160709 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.192078 4922 generic.go:334] "Generic (PLEG): container finished" podID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerID="0f8796815dd6c4e833803702720ffcfd2e0a3df4c59b910ccd8073d4f27b316b" exitCode=0 Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.192121 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" event={"ID":"aad64b0f-67ee-43d9-b0f1-fef345d70e74","Type":"ContainerDied","Data":"0f8796815dd6c4e833803702720ffcfd2e0a3df4c59b910ccd8073d4f27b316b"} Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.245735 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.253791 4922 scope.go:117] "RemoveContainer" containerID="47c614d9cd11959a3be065ac0127f94118344d7a3edcf9f4e539253de68e1711" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.278473 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.296952 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:46:22 crc kubenswrapper[4922]: E0929 23:46:22.297355 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerName="rabbitmq" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.297367 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerName="rabbitmq" Sep 29 23:46:22 crc kubenswrapper[4922]: E0929 23:46:22.297407 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerName="setup-container" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.297414 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerName="setup-container" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.297553 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" containerName="rabbitmq" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.298375 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.300614 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.300630 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.305732 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.307632 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.307996 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.308124 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-9prlm" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.327195 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.433058 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eac0f0c-ea78-49bf-8760-3ef8be59b5ed" path="/var/lib/kubelet/pods/0eac0f0c-ea78-49bf-8760-3ef8be59b5ed/volumes" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.434785 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72bcbb4c-7564-465c-95d7-b498d02d13a4" path="/var/lib/kubelet/pods/72bcbb4c-7564-465c-95d7-b498d02d13a4/volumes" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441339 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441447 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441577 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441650 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441702 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441740 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441762 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h5nm\" (UniqueName: \"kubernetes.io/projected/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-kube-api-access-8h5nm\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441802 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.441923 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.510447 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.543623 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.543773 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.543841 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.543894 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.543929 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.543958 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.543979 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h5nm\" (UniqueName: \"kubernetes.io/projected/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-kube-api-access-8h5nm\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.544010 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.544066 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 
23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.544818 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.544937 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.546037 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.548122 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.548550 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.548862 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.552767 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.552801 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a7ab5f74d4bba979cda4a91f30253cacb1ced1c4859b21586f0117b4a915ff9b/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.552846 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.568197 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h5nm\" (UniqueName: \"kubernetes.io/projected/098321c4-3c5e-485c-bc49-fe5f5bc63b6e-kube-api-access-8h5nm\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.587083 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-be09929c-26fc-430e-9f3a-3eb9b2c15d08\") pod \"rabbitmq-cell1-server-0\" (UID: \"098321c4-3c5e-485c-bc49-fe5f5bc63b6e\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.627468 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.645500 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kd5dp\" (UniqueName: \"kubernetes.io/projected/aad64b0f-67ee-43d9-b0f1-fef345d70e74-kube-api-access-kd5dp\") pod \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.645632 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-dns-svc\") pod \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.645773 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-config\") pod \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\" (UID: \"aad64b0f-67ee-43d9-b0f1-fef345d70e74\") " Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.650853 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aad64b0f-67ee-43d9-b0f1-fef345d70e74-kube-api-access-kd5dp" (OuterVolumeSpecName: "kube-api-access-kd5dp") pod "aad64b0f-67ee-43d9-b0f1-fef345d70e74" (UID: "aad64b0f-67ee-43d9-b0f1-fef345d70e74"). InnerVolumeSpecName "kube-api-access-kd5dp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:46:22 crc kubenswrapper[4922]: I0929 23:46:22.693873 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-config" (OuterVolumeSpecName: "config") pod "aad64b0f-67ee-43d9-b0f1-fef345d70e74" (UID: "aad64b0f-67ee-43d9-b0f1-fef345d70e74"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:22.702869 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "aad64b0f-67ee-43d9-b0f1-fef345d70e74" (UID: "aad64b0f-67ee-43d9-b0f1-fef345d70e74"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:22.747229 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kd5dp\" (UniqueName: \"kubernetes.io/projected/aad64b0f-67ee-43d9-b0f1-fef345d70e74-kube-api-access-kd5dp\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:22.747252 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:22.747264 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aad64b0f-67ee-43d9-b0f1-fef345d70e74-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.207674 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.207771 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-98ddfc8f-gtmv9" event={"ID":"aad64b0f-67ee-43d9-b0f1-fef345d70e74","Type":"ContainerDied","Data":"55731b27db39bd3a9218a67709ce3f736e645847ba594c1477cbc421d552894d"} Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.210140 4922 scope.go:117] "RemoveContainer" containerID="0f8796815dd6c4e833803702720ffcfd2e0a3df4c59b910ccd8073d4f27b316b" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.213187 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"611a3899-4697-458f-8e48-6516d4b9e899","Type":"ContainerStarted","Data":"d78f263789b259e6aa78dd1b292bf664cc99bc43ef4e9884ca9a39ce828f114b"} Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.213246 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"611a3899-4697-458f-8e48-6516d4b9e899","Type":"ContainerStarted","Data":"89ad8d627bd4f3677e81d258e65efd7d40f8be8761af5792746b34af6442624c"} Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.252172 4922 scope.go:117] "RemoveContainer" containerID="7f8fff066c079691bd1b5490eae1ea6e6e39353226e56baf6df2be1951ae3b68" Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.285923 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-gtmv9"] Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.292914 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-98ddfc8f-gtmv9"] Sep 29 23:46:23 crc kubenswrapper[4922]: I0929 23:46:23.749931 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 23:46:23 crc kubenswrapper[4922]: W0929 23:46:23.752798 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod098321c4_3c5e_485c_bc49_fe5f5bc63b6e.slice/crio-20ebd49c4ef01170e1e90146320c8274121371db847f5ee3a56602f5865e003a WatchSource:0}: Error finding container 20ebd49c4ef01170e1e90146320c8274121371db847f5ee3a56602f5865e003a: Status 404 returned error can't find the container with id 20ebd49c4ef01170e1e90146320c8274121371db847f5ee3a56602f5865e003a Sep 29 23:46:24 crc kubenswrapper[4922]: I0929 23:46:24.249054 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"098321c4-3c5e-485c-bc49-fe5f5bc63b6e","Type":"ContainerStarted","Data":"08523db283543626d59d4e088f109c37b23b219c66a085575d830cc220405d41"} Sep 29 23:46:24 crc kubenswrapper[4922]: I0929 23:46:24.250005 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"098321c4-3c5e-485c-bc49-fe5f5bc63b6e","Type":"ContainerStarted","Data":"20ebd49c4ef01170e1e90146320c8274121371db847f5ee3a56602f5865e003a"} Sep 29 23:46:24 crc kubenswrapper[4922]: I0929 23:46:24.437200 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" path="/var/lib/kubelet/pods/aad64b0f-67ee-43d9-b0f1-fef345d70e74/volumes" Sep 29 23:46:26 crc kubenswrapper[4922]: I0929 23:46:26.429339 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:46:26 crc kubenswrapper[4922]: E0929 23:46:26.430176 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:46:40 crc kubenswrapper[4922]: I0929 23:46:40.422206 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:46:40 crc kubenswrapper[4922]: E0929 23:46:40.423231 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:46:53 crc kubenswrapper[4922]: I0929 23:46:53.422826 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:46:53 crc kubenswrapper[4922]: E0929 23:46:53.424221 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:46:53 crc kubenswrapper[4922]: I0929 23:46:53.545757 4922 generic.go:334] "Generic (PLEG): container finished" podID="611a3899-4697-458f-8e48-6516d4b9e899" containerID="d78f263789b259e6aa78dd1b292bf664cc99bc43ef4e9884ca9a39ce828f114b" exitCode=0 Sep 29 23:46:53 crc kubenswrapper[4922]: I0929 23:46:53.545824 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"611a3899-4697-458f-8e48-6516d4b9e899","Type":"ContainerDied","Data":"d78f263789b259e6aa78dd1b292bf664cc99bc43ef4e9884ca9a39ce828f114b"} Sep 29 23:46:54 crc kubenswrapper[4922]: I0929 23:46:54.559973 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"611a3899-4697-458f-8e48-6516d4b9e899","Type":"ContainerStarted","Data":"25a19f855ebfed32d90faab4e0e6a64c9d7f1a02954dee5abada0fcb022d3c69"} Sep 29 23:46:54 crc kubenswrapper[4922]: I0929 23:46:54.560618 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 23:46:54 crc kubenswrapper[4922]: I0929 23:46:54.603009 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=33.602984512 podStartE2EDuration="33.602984512s" podCreationTimestamp="2025-09-29 23:46:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:46:54.594095973 +0000 UTC m=+4818.904384786" watchObservedRunningTime="2025-09-29 23:46:54.602984512 +0000 UTC m=+4818.913273365" Sep 29 23:46:55 crc kubenswrapper[4922]: I0929 23:46:55.569290 4922 generic.go:334] "Generic (PLEG): container finished" podID="098321c4-3c5e-485c-bc49-fe5f5bc63b6e" containerID="08523db283543626d59d4e088f109c37b23b219c66a085575d830cc220405d41" exitCode=0 Sep 29 
23:46:55 crc kubenswrapper[4922]: I0929 23:46:55.569465 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"098321c4-3c5e-485c-bc49-fe5f5bc63b6e","Type":"ContainerDied","Data":"08523db283543626d59d4e088f109c37b23b219c66a085575d830cc220405d41"} Sep 29 23:46:56 crc kubenswrapper[4922]: I0929 23:46:56.582099 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"098321c4-3c5e-485c-bc49-fe5f5bc63b6e","Type":"ContainerStarted","Data":"b65539d6bb972e2d0ea628771155065984fc4ad44419792456d575c465f4e9ae"} Sep 29 23:46:56 crc kubenswrapper[4922]: I0929 23:46:56.582883 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:46:56 crc kubenswrapper[4922]: I0929 23:46:56.657872 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.657850548 podStartE2EDuration="34.657850548s" podCreationTimestamp="2025-09-29 23:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:46:56.656915195 +0000 UTC m=+4820.967204008" watchObservedRunningTime="2025-09-29 23:46:56.657850548 +0000 UTC m=+4820.968139361" Sep 29 23:47:05 crc kubenswrapper[4922]: I0929 23:47:05.422124 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:47:05 crc kubenswrapper[4922]: E0929 23:47:05.423353 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:47:11 crc kubenswrapper[4922]: I0929 23:47:11.808620 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 23:47:12 crc kubenswrapper[4922]: I0929 23:47:12.630662 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.405434 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Sep 29 23:47:20 crc kubenswrapper[4922]: E0929 23:47:20.406747 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerName="dnsmasq-dns" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.406771 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerName="dnsmasq-dns" Sep 29 23:47:20 crc kubenswrapper[4922]: E0929 23:47:20.406808 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerName="init" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.406820 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerName="init" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.407092 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="aad64b0f-67ee-43d9-b0f1-fef345d70e74" containerName="dnsmasq-dns" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.408026 
4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.411556 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nb56t" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.416331 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.429701 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:47:20 crc kubenswrapper[4922]: E0929 23:47:20.429942 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.468168 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdv8q\" (UniqueName: \"kubernetes.io/projected/cd83cd8d-8563-41d6-9008-c8193233c5c8-kube-api-access-sdv8q\") pod \"mariadb-client-1-default\" (UID: \"cd83cd8d-8563-41d6-9008-c8193233c5c8\") " pod="openstack/mariadb-client-1-default" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.570529 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdv8q\" (UniqueName: \"kubernetes.io/projected/cd83cd8d-8563-41d6-9008-c8193233c5c8-kube-api-access-sdv8q\") pod \"mariadb-client-1-default\" (UID: \"cd83cd8d-8563-41d6-9008-c8193233c5c8\") " pod="openstack/mariadb-client-1-default" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.591231 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdv8q\" (UniqueName: \"kubernetes.io/projected/cd83cd8d-8563-41d6-9008-c8193233c5c8-kube-api-access-sdv8q\") pod \"mariadb-client-1-default\" (UID: \"cd83cd8d-8563-41d6-9008-c8193233c5c8\") " pod="openstack/mariadb-client-1-default" Sep 29 23:47:20 crc kubenswrapper[4922]: I0929 23:47:20.747975 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 29 23:47:21 crc kubenswrapper[4922]: I0929 23:47:21.334929 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 29 23:47:21 crc kubenswrapper[4922]: I0929 23:47:21.825949 4922 generic.go:334] "Generic (PLEG): container finished" podID="cd83cd8d-8563-41d6-9008-c8193233c5c8" containerID="94ce9a15e6aaf2abf2622537dcc880b3a68661e84e94966b8b67abddfb72c52f" exitCode=0 Sep 29 23:47:21 crc kubenswrapper[4922]: I0929 23:47:21.826065 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"cd83cd8d-8563-41d6-9008-c8193233c5c8","Type":"ContainerDied","Data":"94ce9a15e6aaf2abf2622537dcc880b3a68661e84e94966b8b67abddfb72c52f"} Sep 29 23:47:21 crc kubenswrapper[4922]: I0929 23:47:21.826483 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"cd83cd8d-8563-41d6-9008-c8193233c5c8","Type":"ContainerStarted","Data":"db8763f78dc49a8d0b5d5f1d62c8bd13303382bbe14ce15bddf8550b1d2b5923"} Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.755272 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2d4xh"] Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.757261 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.769864 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2d4xh"] Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.812942 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f77p5\" (UniqueName: \"kubernetes.io/projected/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-kube-api-access-f77p5\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.813503 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-catalog-content\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.813575 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-utilities\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.915606 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-catalog-content\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.915703 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-utilities\") pod 
\"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.916180 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-catalog-content\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.916477 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-utilities\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.916603 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f77p5\" (UniqueName: \"kubernetes.io/projected/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-kube-api-access-f77p5\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.947982 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qtk7b"] Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.949826 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.956824 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f77p5\" (UniqueName: \"kubernetes.io/projected/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-kube-api-access-f77p5\") pod \"certified-operators-2d4xh\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:22 crc kubenswrapper[4922]: I0929 23:47:22.969091 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtk7b"] Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.119074 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgf8c\" (UniqueName: \"kubernetes.io/projected/ed888449-0417-490b-bd66-f70e7e8a76e9-kube-api-access-vgf8c\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.119271 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-catalog-content\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.119331 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-utilities\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.143576 4922 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.220417 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-catalog-content\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.220795 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-utilities\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.220818 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgf8c\" (UniqueName: \"kubernetes.io/projected/ed888449-0417-490b-bd66-f70e7e8a76e9-kube-api-access-vgf8c\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.221097 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-catalog-content\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.221340 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-utilities\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.236982 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgf8c\" (UniqueName: \"kubernetes.io/projected/ed888449-0417-490b-bd66-f70e7e8a76e9-kube-api-access-vgf8c\") pod \"redhat-marketplace-qtk7b\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.302480 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.315737 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.348887 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_cd83cd8d-8563-41d6-9008-c8193233c5c8/mariadb-client-1-default/0.log" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.389435 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.404824 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.423247 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdv8q\" (UniqueName: \"kubernetes.io/projected/cd83cd8d-8563-41d6-9008-c8193233c5c8-kube-api-access-sdv8q\") pod \"cd83cd8d-8563-41d6-9008-c8193233c5c8\" (UID: \"cd83cd8d-8563-41d6-9008-c8193233c5c8\") " Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.426977 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2d4xh"] Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.430789 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd83cd8d-8563-41d6-9008-c8193233c5c8-kube-api-access-sdv8q" (OuterVolumeSpecName: "kube-api-access-sdv8q") pod "cd83cd8d-8563-41d6-9008-c8193233c5c8" (UID: "cd83cd8d-8563-41d6-9008-c8193233c5c8"). InnerVolumeSpecName "kube-api-access-sdv8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:23 crc kubenswrapper[4922]: W0929 23:47:23.447477 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7d1eaf7_cb93_4965_bb9c_b9d59a6a06a8.slice/crio-710aba5aeebb4968782a74a7586b8ac64c5a623af776f21dee52f291ee60ad68 WatchSource:0}: Error finding container 710aba5aeebb4968782a74a7586b8ac64c5a623af776f21dee52f291ee60ad68: Status 404 returned error can't find the container with id 710aba5aeebb4968782a74a7586b8ac64c5a623af776f21dee52f291ee60ad68 Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.525409 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdv8q\" (UniqueName: \"kubernetes.io/projected/cd83cd8d-8563-41d6-9008-c8193233c5c8-kube-api-access-sdv8q\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.821029 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtk7b"] Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.848314 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerID="1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c" exitCode=0 Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.848377 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2d4xh" event={"ID":"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8","Type":"ContainerDied","Data":"1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c"} Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.848443 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2d4xh" event={"ID":"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8","Type":"ContainerStarted","Data":"710aba5aeebb4968782a74a7586b8ac64c5a623af776f21dee52f291ee60ad68"} Sep 29 23:47:23 crc 
kubenswrapper[4922]: I0929 23:47:23.850552 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.850593 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db8763f78dc49a8d0b5d5f1d62c8bd13303382bbe14ce15bddf8550b1d2b5923" Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.852628 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtk7b" event={"ID":"ed888449-0417-490b-bd66-f70e7e8a76e9","Type":"ContainerStarted","Data":"c2beafb7007909b3233656ddb0a84ab3386138fd9c8bf894dcc7b3a108745faa"} Sep 29 23:47:23 crc kubenswrapper[4922]: I0929 23:47:23.854892 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.039835 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Sep 29 23:47:24 crc kubenswrapper[4922]: E0929 23:47:24.040223 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd83cd8d-8563-41d6-9008-c8193233c5c8" containerName="mariadb-client-1-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.040238 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd83cd8d-8563-41d6-9008-c8193233c5c8" containerName="mariadb-client-1-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.040531 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd83cd8d-8563-41d6-9008-c8193233c5c8" containerName="mariadb-client-1-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.041190 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.046216 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nb56t" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.051875 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.237481 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4srk\" (UniqueName: \"kubernetes.io/projected/b9e58497-1f55-4ccd-a219-ed3321642f92-kube-api-access-h4srk\") pod \"mariadb-client-2-default\" (UID: \"b9e58497-1f55-4ccd-a219-ed3321642f92\") " pod="openstack/mariadb-client-2-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.339145 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4srk\" (UniqueName: \"kubernetes.io/projected/b9e58497-1f55-4ccd-a219-ed3321642f92-kube-api-access-h4srk\") pod \"mariadb-client-2-default\" (UID: \"b9e58497-1f55-4ccd-a219-ed3321642f92\") " pod="openstack/mariadb-client-2-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.372452 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4srk\" (UniqueName: \"kubernetes.io/projected/b9e58497-1f55-4ccd-a219-ed3321642f92-kube-api-access-h4srk\") pod \"mariadb-client-2-default\" (UID: \"b9e58497-1f55-4ccd-a219-ed3321642f92\") " pod="openstack/mariadb-client-2-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.442938 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd83cd8d-8563-41d6-9008-c8193233c5c8" 
path="/var/lib/kubelet/pods/cd83cd8d-8563-41d6-9008-c8193233c5c8/volumes" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.670388 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.869893 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2d4xh" event={"ID":"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8","Type":"ContainerStarted","Data":"2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567"} Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.877295 4922 generic.go:334] "Generic (PLEG): container finished" podID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerID="5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02" exitCode=0 Sep 29 23:47:24 crc kubenswrapper[4922]: I0929 23:47:24.877334 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtk7b" event={"ID":"ed888449-0417-490b-bd66-f70e7e8a76e9","Type":"ContainerDied","Data":"5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02"} Sep 29 23:47:25 crc kubenswrapper[4922]: I0929 23:47:25.300625 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 29 23:47:25 crc kubenswrapper[4922]: I0929 23:47:25.885426 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"b9e58497-1f55-4ccd-a219-ed3321642f92","Type":"ContainerStarted","Data":"83c2723de37ea32586633f61e1719af27f1ec963c5f81f3acf3ad46956d7f12d"} Sep 29 23:47:25 crc kubenswrapper[4922]: I0929 23:47:25.887792 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerID="2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567" exitCode=0 Sep 29 23:47:25 crc kubenswrapper[4922]: I0929 23:47:25.887828 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2d4xh" event={"ID":"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8","Type":"ContainerDied","Data":"2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567"} Sep 29 23:47:26 crc kubenswrapper[4922]: I0929 23:47:26.901601 4922 generic.go:334] "Generic (PLEG): container finished" podID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerID="bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d" exitCode=0 Sep 29 23:47:26 crc kubenswrapper[4922]: I0929 23:47:26.902187 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtk7b" event={"ID":"ed888449-0417-490b-bd66-f70e7e8a76e9","Type":"ContainerDied","Data":"bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d"} Sep 29 23:47:26 crc kubenswrapper[4922]: I0929 23:47:26.911444 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"b9e58497-1f55-4ccd-a219-ed3321642f92","Type":"ContainerDied","Data":"e9d5079334c9f729cba6e119789c518d9583c70876a4a79141d21b8b29d58cf2"} Sep 29 23:47:26 crc kubenswrapper[4922]: I0929 23:47:26.912199 4922 generic.go:334] "Generic (PLEG): container finished" podID="b9e58497-1f55-4ccd-a219-ed3321642f92" containerID="e9d5079334c9f729cba6e119789c518d9583c70876a4a79141d21b8b29d58cf2" exitCode=0 Sep 29 23:47:26 crc kubenswrapper[4922]: I0929 23:47:26.917868 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2d4xh" 
event={"ID":"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8","Type":"ContainerStarted","Data":"f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802"} Sep 29 23:47:26 crc kubenswrapper[4922]: I0929 23:47:26.998818 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2d4xh" podStartSLOduration=2.488401942 podStartE2EDuration="4.998778409s" podCreationTimestamp="2025-09-29 23:47:22 +0000 UTC" firstStartedPulling="2025-09-29 23:47:23.854625722 +0000 UTC m=+4848.164914535" lastFinishedPulling="2025-09-29 23:47:26.365002159 +0000 UTC m=+4850.675291002" observedRunningTime="2025-09-29 23:47:26.979708809 +0000 UTC m=+4851.289997672" watchObservedRunningTime="2025-09-29 23:47:26.998778409 +0000 UTC m=+4851.309067272" Sep 29 23:47:27 crc kubenswrapper[4922]: I0929 23:47:27.929881 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtk7b" event={"ID":"ed888449-0417-490b-bd66-f70e7e8a76e9","Type":"ContainerStarted","Data":"9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d"} Sep 29 23:47:27 crc kubenswrapper[4922]: I0929 23:47:27.960450 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qtk7b" podStartSLOduration=3.546182196 podStartE2EDuration="5.960424508s" podCreationTimestamp="2025-09-29 23:47:22 +0000 UTC" firstStartedPulling="2025-09-29 23:47:24.879664991 +0000 UTC m=+4849.189953804" lastFinishedPulling="2025-09-29 23:47:27.293907283 +0000 UTC m=+4851.604196116" observedRunningTime="2025-09-29 23:47:27.9552267 +0000 UTC m=+4852.265515543" watchObservedRunningTime="2025-09-29 23:47:27.960424508 +0000 UTC m=+4852.270713341" Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.474352 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.515196 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2-default_b9e58497-1f55-4ccd-a219-ed3321642f92/mariadb-client-2-default/0.log" Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.520143 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4srk\" (UniqueName: \"kubernetes.io/projected/b9e58497-1f55-4ccd-a219-ed3321642f92-kube-api-access-h4srk\") pod \"b9e58497-1f55-4ccd-a219-ed3321642f92\" (UID: \"b9e58497-1f55-4ccd-a219-ed3321642f92\") " Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.547994 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9e58497-1f55-4ccd-a219-ed3321642f92-kube-api-access-h4srk" (OuterVolumeSpecName: "kube-api-access-h4srk") pod "b9e58497-1f55-4ccd-a219-ed3321642f92" (UID: "b9e58497-1f55-4ccd-a219-ed3321642f92"). InnerVolumeSpecName "kube-api-access-h4srk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.549051 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.557503 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.624147 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4srk\" (UniqueName: \"kubernetes.io/projected/b9e58497-1f55-4ccd-a219-ed3321642f92-kube-api-access-h4srk\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.942946 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Sep 29 23:47:28 crc kubenswrapper[4922]: I0929 23:47:28.942946 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83c2723de37ea32586633f61e1719af27f1ec963c5f81f3acf3ad46956d7f12d" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.136979 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Sep 29 23:47:29 crc kubenswrapper[4922]: E0929 23:47:29.137868 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e58497-1f55-4ccd-a219-ed3321642f92" containerName="mariadb-client-2-default" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.137902 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e58497-1f55-4ccd-a219-ed3321642f92" containerName="mariadb-client-2-default" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.138160 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9e58497-1f55-4ccd-a219-ed3321642f92" containerName="mariadb-client-2-default" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.138914 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.143028 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nb56t" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.154779 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.334896 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgc7d\" (UniqueName: \"kubernetes.io/projected/0f197845-e608-4bab-bbff-fc5b298d0837-kube-api-access-mgc7d\") pod \"mariadb-client-1\" (UID: \"0f197845-e608-4bab-bbff-fc5b298d0837\") " pod="openstack/mariadb-client-1" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.436600 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgc7d\" (UniqueName: \"kubernetes.io/projected/0f197845-e608-4bab-bbff-fc5b298d0837-kube-api-access-mgc7d\") pod \"mariadb-client-1\" (UID: \"0f197845-e608-4bab-bbff-fc5b298d0837\") " pod="openstack/mariadb-client-1" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.467784 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgc7d\" (UniqueName: \"kubernetes.io/projected/0f197845-e608-4bab-bbff-fc5b298d0837-kube-api-access-mgc7d\") pod \"mariadb-client-1\" (UID: \"0f197845-e608-4bab-bbff-fc5b298d0837\") " pod="openstack/mariadb-client-1" Sep 29 23:47:29 crc kubenswrapper[4922]: I0929 23:47:29.764737 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Sep 29 23:47:30 crc kubenswrapper[4922]: I0929 23:47:30.354212 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Sep 29 23:47:30 crc kubenswrapper[4922]: W0929 23:47:30.362471 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f197845_e608_4bab_bbff_fc5b298d0837.slice/crio-958d8f38216a6d3862e849d53c783d2450e0fbc905b8e26915449250229fb32b WatchSource:0}: Error finding container 958d8f38216a6d3862e849d53c783d2450e0fbc905b8e26915449250229fb32b: Status 404 returned error can't find the container with id 958d8f38216a6d3862e849d53c783d2450e0fbc905b8e26915449250229fb32b Sep 29 23:47:30 crc kubenswrapper[4922]: I0929 23:47:30.437592 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9e58497-1f55-4ccd-a219-ed3321642f92" path="/var/lib/kubelet/pods/b9e58497-1f55-4ccd-a219-ed3321642f92/volumes" Sep 29 23:47:30 crc kubenswrapper[4922]: I0929 23:47:30.966085 4922 generic.go:334] "Generic (PLEG): container finished" podID="0f197845-e608-4bab-bbff-fc5b298d0837" containerID="5a3ae0a7424d21de8919f5a6d999bf3b283f9d3e181302bc9cf687b70790df5c" exitCode=0 Sep 29 23:47:30 crc kubenswrapper[4922]: I0929 23:47:30.966159 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"0f197845-e608-4bab-bbff-fc5b298d0837","Type":"ContainerDied","Data":"5a3ae0a7424d21de8919f5a6d999bf3b283f9d3e181302bc9cf687b70790df5c"} Sep 29 23:47:30 crc kubenswrapper[4922]: I0929 23:47:30.966202 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"0f197845-e608-4bab-bbff-fc5b298d0837","Type":"ContainerStarted","Data":"958d8f38216a6d3862e849d53c783d2450e0fbc905b8e26915449250229fb32b"} Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 
23:47:32.426805 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.580387 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.603638 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_0f197845-e608-4bab-bbff-fc5b298d0837/mariadb-client-1/0.log" Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.667606 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.674623 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.715655 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgc7d\" (UniqueName: \"kubernetes.io/projected/0f197845-e608-4bab-bbff-fc5b298d0837-kube-api-access-mgc7d\") pod \"0f197845-e608-4bab-bbff-fc5b298d0837\" (UID: \"0f197845-e608-4bab-bbff-fc5b298d0837\") " Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.726189 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f197845-e608-4bab-bbff-fc5b298d0837-kube-api-access-mgc7d" (OuterVolumeSpecName: "kube-api-access-mgc7d") pod "0f197845-e608-4bab-bbff-fc5b298d0837" (UID: "0f197845-e608-4bab-bbff-fc5b298d0837"). InnerVolumeSpecName "kube-api-access-mgc7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.817859 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgc7d\" (UniqueName: \"kubernetes.io/projected/0f197845-e608-4bab-bbff-fc5b298d0837-kube-api-access-mgc7d\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.997637 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="958d8f38216a6d3862e849d53c783d2450e0fbc905b8e26915449250229fb32b" Sep 29 23:47:32 crc kubenswrapper[4922]: I0929 23:47:32.997701 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.001440 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"ff9f576c4f920051ee029d64618cf6e7e349c0b190d881ecf9094b87fe73dac7"} Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.144667 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.145134 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.195869 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.233438 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Sep 29 23:47:33 crc kubenswrapper[4922]: E0929 23:47:33.233899 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f197845-e608-4bab-bbff-fc5b298d0837" containerName="mariadb-client-1" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.233912 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f197845-e608-4bab-bbff-fc5b298d0837" containerName="mariadb-client-1" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.234109 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f197845-e608-4bab-bbff-fc5b298d0837" containerName="mariadb-client-1" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.234775 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.236909 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nb56t" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.251603 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.316801 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.316845 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.378624 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.434217 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5ckr\" (UniqueName: \"kubernetes.io/projected/3d924f38-bea0-4ac4-bf1f-e6e0bc11999f-kube-api-access-k5ckr\") pod \"mariadb-client-4-default\" (UID: \"3d924f38-bea0-4ac4-bf1f-e6e0bc11999f\") " pod="openstack/mariadb-client-4-default" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.536460 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5ckr\" (UniqueName: \"kubernetes.io/projected/3d924f38-bea0-4ac4-bf1f-e6e0bc11999f-kube-api-access-k5ckr\") pod \"mariadb-client-4-default\" (UID: \"3d924f38-bea0-4ac4-bf1f-e6e0bc11999f\") " pod="openstack/mariadb-client-4-default" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.558316 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5ckr\" (UniqueName: \"kubernetes.io/projected/3d924f38-bea0-4ac4-bf1f-e6e0bc11999f-kube-api-access-k5ckr\") pod \"mariadb-client-4-default\" (UID: \"3d924f38-bea0-4ac4-bf1f-e6e0bc11999f\") " pod="openstack/mariadb-client-4-default" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.573254 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 29 23:47:33 crc kubenswrapper[4922]: I0929 23:47:33.933387 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 29 23:47:33 crc kubenswrapper[4922]: W0929 23:47:33.941693 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d924f38_bea0_4ac4_bf1f_e6e0bc11999f.slice/crio-beee93971e539d2228fb0c7061cf9b4af56760850febcc3f1d84e2faafdf65ff WatchSource:0}: Error finding container beee93971e539d2228fb0c7061cf9b4af56760850febcc3f1d84e2faafdf65ff: Status 404 returned error can't find the container with id beee93971e539d2228fb0c7061cf9b4af56760850febcc3f1d84e2faafdf65ff Sep 29 23:47:34 crc kubenswrapper[4922]: I0929 23:47:34.018056 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"3d924f38-bea0-4ac4-bf1f-e6e0bc11999f","Type":"ContainerStarted","Data":"beee93971e539d2228fb0c7061cf9b4af56760850febcc3f1d84e2faafdf65ff"} Sep 29 23:47:34 crc kubenswrapper[4922]: I0929 23:47:34.088875 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:34 crc kubenswrapper[4922]: I0929 23:47:34.090336 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:34 crc kubenswrapper[4922]: I0929 23:47:34.437380 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f197845-e608-4bab-bbff-fc5b298d0837" path="/var/lib/kubelet/pods/0f197845-e608-4bab-bbff-fc5b298d0837/volumes" Sep 29 23:47:34 crc kubenswrapper[4922]: I0929 23:47:34.746471 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtk7b"] Sep 29 23:47:35 crc kubenswrapper[4922]: I0929 23:47:35.027984 4922 generic.go:334] "Generic (PLEG): container finished" podID="3d924f38-bea0-4ac4-bf1f-e6e0bc11999f" containerID="ab94c340f8adaf3757f6cb0fa1eaf9a0a246c6167f173931d1d13c4140d87c20" exitCode=0 Sep 29 23:47:35 crc kubenswrapper[4922]: I0929 23:47:35.028629 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"3d924f38-bea0-4ac4-bf1f-e6e0bc11999f","Type":"ContainerDied","Data":"ab94c340f8adaf3757f6cb0fa1eaf9a0a246c6167f173931d1d13c4140d87c20"} Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.036892 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qtk7b" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerName="registry-server" containerID="cri-o://9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d" gracePeriod=2 Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.510635 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.515961 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.528538 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_3d924f38-bea0-4ac4-bf1f-e6e0bc11999f/mariadb-client-4-default/0.log" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.542245 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2d4xh"] Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.542510 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2d4xh" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="registry-server" containerID="cri-o://f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802" gracePeriod=2 Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.563865 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.568231 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.592278 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5ckr\" (UniqueName: \"kubernetes.io/projected/3d924f38-bea0-4ac4-bf1f-e6e0bc11999f-kube-api-access-k5ckr\") pod \"3d924f38-bea0-4ac4-bf1f-e6e0bc11999f\" (UID: \"3d924f38-bea0-4ac4-bf1f-e6e0bc11999f\") " Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.592477 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgf8c\" (UniqueName: \"kubernetes.io/projected/ed888449-0417-490b-bd66-f70e7e8a76e9-kube-api-access-vgf8c\") pod \"ed888449-0417-490b-bd66-f70e7e8a76e9\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.592556 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-utilities\") pod \"ed888449-0417-490b-bd66-f70e7e8a76e9\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.592654 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-catalog-content\") pod \"ed888449-0417-490b-bd66-f70e7e8a76e9\" (UID: \"ed888449-0417-490b-bd66-f70e7e8a76e9\") " Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.593710 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-utilities" (OuterVolumeSpecName: "utilities") pod "ed888449-0417-490b-bd66-f70e7e8a76e9" (UID: "ed888449-0417-490b-bd66-f70e7e8a76e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.597506 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed888449-0417-490b-bd66-f70e7e8a76e9-kube-api-access-vgf8c" (OuterVolumeSpecName: "kube-api-access-vgf8c") pod "ed888449-0417-490b-bd66-f70e7e8a76e9" (UID: "ed888449-0417-490b-bd66-f70e7e8a76e9"). InnerVolumeSpecName "kube-api-access-vgf8c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.598524 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d924f38-bea0-4ac4-bf1f-e6e0bc11999f-kube-api-access-k5ckr" (OuterVolumeSpecName: "kube-api-access-k5ckr") pod "3d924f38-bea0-4ac4-bf1f-e6e0bc11999f" (UID: "3d924f38-bea0-4ac4-bf1f-e6e0bc11999f"). InnerVolumeSpecName "kube-api-access-k5ckr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.605870 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed888449-0417-490b-bd66-f70e7e8a76e9" (UID: "ed888449-0417-490b-bd66-f70e7e8a76e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.694657 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5ckr\" (UniqueName: \"kubernetes.io/projected/3d924f38-bea0-4ac4-bf1f-e6e0bc11999f-kube-api-access-k5ckr\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.694711 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgf8c\" (UniqueName: \"kubernetes.io/projected/ed888449-0417-490b-bd66-f70e7e8a76e9-kube-api-access-vgf8c\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.694731 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.694749 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed888449-0417-490b-bd66-f70e7e8a76e9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.928836 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.998345 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-utilities\") pod \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.999460 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-catalog-content\") pod \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.999540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f77p5\" (UniqueName: \"kubernetes.io/projected/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-kube-api-access-f77p5\") pod \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\" (UID: \"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8\") " Sep 29 23:47:36 crc kubenswrapper[4922]: I0929 23:47:36.999957 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-utilities" (OuterVolumeSpecName: "utilities") pod "e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" (UID: "e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.003725 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-kube-api-access-f77p5" (OuterVolumeSpecName: "kube-api-access-f77p5") pod "e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" (UID: "e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8"). InnerVolumeSpecName "kube-api-access-f77p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.046844 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" (UID: "e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.048467 4922 generic.go:334] "Generic (PLEG): container finished" podID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerID="9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d" exitCode=0 Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.048543 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtk7b" event={"ID":"ed888449-0417-490b-bd66-f70e7e8a76e9","Type":"ContainerDied","Data":"9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d"} Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.048597 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qtk7b" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.048624 4922 scope.go:117] "RemoveContainer" containerID="9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.048605 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qtk7b" event={"ID":"ed888449-0417-490b-bd66-f70e7e8a76e9","Type":"ContainerDied","Data":"c2beafb7007909b3233656ddb0a84ab3386138fd9c8bf894dcc7b3a108745faa"} Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.052059 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerID="f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802" exitCode=0 Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.052129 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2d4xh" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.052165 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2d4xh" event={"ID":"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8","Type":"ContainerDied","Data":"f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802"} Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.052205 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2d4xh" event={"ID":"e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8","Type":"ContainerDied","Data":"710aba5aeebb4968782a74a7586b8ac64c5a623af776f21dee52f291ee60ad68"} Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.055608 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="beee93971e539d2228fb0c7061cf9b4af56760850febcc3f1d84e2faafdf65ff" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.055658 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.075123 4922 scope.go:117] "RemoveContainer" containerID="bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.101977 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.102025 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f77p5\" (UniqueName: \"kubernetes.io/projected/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-kube-api-access-f77p5\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.102044 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.110559 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2d4xh"] Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.111145 4922 scope.go:117] "RemoveContainer" containerID="5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.127907 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2d4xh"] Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.132743 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtk7b"] Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.136893 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qtk7b"] Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.140808 4922 scope.go:117] "RemoveContainer" containerID="9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d" Sep 29 23:47:37 crc kubenswrapper[4922]: E0929 23:47:37.141253 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d\": container with ID starting with 9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d not found: ID does not exist" containerID="9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.141284 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d"} err="failed to get container status \"9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d\": rpc error: code = NotFound desc = could not find container \"9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d\": container with ID starting with 9e297024f83641c7aa984632bf0b4d5ea3353ca54ccb08ec06fba6a66932636d not found: ID does not exist" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.141310 4922 scope.go:117] "RemoveContainer" containerID="bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d" Sep 29 23:47:37 crc kubenswrapper[4922]: E0929 23:47:37.141792 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d\": container with ID starting with bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d not found: ID does not exist" containerID="bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.141828 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d"} err="failed to get container status \"bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d\": rpc error: code = NotFound desc = could not find container \"bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d\": container with ID starting with bf1b04ae4a650b3605424111ed3c9f5d04d2d05d3f90322e0f3dacc5d78f2a7d not found: ID does not exist" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.141853 4922 scope.go:117] "RemoveContainer" containerID="5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02" Sep 29 23:47:37 crc kubenswrapper[4922]: E0929 23:47:37.142147 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02\": container with ID starting with 5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02 not found: ID does not exist" containerID="5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.142197 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02"} err="failed to get container status \"5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02\": rpc error: code = NotFound desc = could not find container \"5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02\": container with ID starting with 5ea574ed1dc802ff158cf2f8da64e9f0ff9b2202fe7e6d77297ece12db8c8c02 not found: ID does not exist" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.142228 4922 scope.go:117] "RemoveContainer" containerID="f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.157913 4922 scope.go:117] "RemoveContainer" containerID="2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.226638 4922 scope.go:117] "RemoveContainer" containerID="1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.252180 4922 scope.go:117] "RemoveContainer" containerID="f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802" Sep 29 23:47:37 crc kubenswrapper[4922]: E0929 23:47:37.253000 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802\": container with ID starting with f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802 not found: ID does not exist" containerID="f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.253059 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802"} err="failed to get container status 
\"f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802\": rpc error: code = NotFound desc = could not find container \"f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802\": container with ID starting with f179ff6089a43c5be8973cb174e72b54a4779c0864b20c1457fe0b746061f802 not found: ID does not exist" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.253093 4922 scope.go:117] "RemoveContainer" containerID="2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567" Sep 29 23:47:37 crc kubenswrapper[4922]: E0929 23:47:37.253450 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567\": container with ID starting with 2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567 not found: ID does not exist" containerID="2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.253481 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567"} err="failed to get container status \"2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567\": rpc error: code = NotFound desc = could not find container \"2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567\": container with ID starting with 2af557a6cffb2d4bc2d0de737bb5c2f167b150bd037edaed3d4027b2d2d99567 not found: ID does not exist" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.253517 4922 scope.go:117] "RemoveContainer" containerID="1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c" Sep 29 23:47:37 crc kubenswrapper[4922]: E0929 23:47:37.253796 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c\": container with ID starting with 1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c not found: ID does not exist" containerID="1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c" Sep 29 23:47:37 crc kubenswrapper[4922]: I0929 23:47:37.253820 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c"} err="failed to get container status \"1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c\": rpc error: code = NotFound desc = could not find container \"1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c\": container with ID starting with 1cc1ce9528bbc46229db6414c4a01ac22fcd3dce828107295810818d1408c03c not found: ID does not exist" Sep 29 23:47:38 crc kubenswrapper[4922]: I0929 23:47:38.440706 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d924f38-bea0-4ac4-bf1f-e6e0bc11999f" path="/var/lib/kubelet/pods/3d924f38-bea0-4ac4-bf1f-e6e0bc11999f/volumes" Sep 29 23:47:38 crc kubenswrapper[4922]: I0929 23:47:38.442183 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" path="/var/lib/kubelet/pods/e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8/volumes" Sep 29 23:47:38 crc kubenswrapper[4922]: I0929 23:47:38.443611 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" path="/var/lib/kubelet/pods/ed888449-0417-490b-bd66-f70e7e8a76e9/volumes" Sep 
29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.237769 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Sep 29 23:47:41 crc kubenswrapper[4922]: E0929 23:47:41.238645 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="registry-server" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.238668 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="registry-server" Sep 29 23:47:41 crc kubenswrapper[4922]: E0929 23:47:41.238686 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerName="extract-utilities" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.238696 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerName="extract-utilities" Sep 29 23:47:41 crc kubenswrapper[4922]: E0929 23:47:41.238715 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="extract-utilities" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.238725 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="extract-utilities" Sep 29 23:47:41 crc kubenswrapper[4922]: E0929 23:47:41.238745 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerName="extract-content" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.238754 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerName="extract-content" Sep 29 23:47:41 crc kubenswrapper[4922]: E0929 23:47:41.238770 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="extract-content" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.238779 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="extract-content" Sep 29 23:47:41 crc kubenswrapper[4922]: E0929 23:47:41.238801 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d924f38-bea0-4ac4-bf1f-e6e0bc11999f" containerName="mariadb-client-4-default" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.238810 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d924f38-bea0-4ac4-bf1f-e6e0bc11999f" containerName="mariadb-client-4-default" Sep 29 23:47:41 crc kubenswrapper[4922]: E0929 23:47:41.238830 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerName="registry-server" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.238839 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" containerName="registry-server" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.239093 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d924f38-bea0-4ac4-bf1f-e6e0bc11999f" containerName="mariadb-client-4-default" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.239122 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7d1eaf7-cb93-4965-bb9c-b9d59a6a06a8" containerName="registry-server" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.239150 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed888449-0417-490b-bd66-f70e7e8a76e9" 
containerName="registry-server" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.240115 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.242897 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nb56t" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.251255 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.277118 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgsmb\" (UniqueName: \"kubernetes.io/projected/7a67df37-889e-4b45-949b-ceaca5ac719e-kube-api-access-mgsmb\") pod \"mariadb-client-5-default\" (UID: \"7a67df37-889e-4b45-949b-ceaca5ac719e\") " pod="openstack/mariadb-client-5-default" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.378463 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgsmb\" (UniqueName: \"kubernetes.io/projected/7a67df37-889e-4b45-949b-ceaca5ac719e-kube-api-access-mgsmb\") pod \"mariadb-client-5-default\" (UID: \"7a67df37-889e-4b45-949b-ceaca5ac719e\") " pod="openstack/mariadb-client-5-default" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.750520 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgsmb\" (UniqueName: \"kubernetes.io/projected/7a67df37-889e-4b45-949b-ceaca5ac719e-kube-api-access-mgsmb\") pod \"mariadb-client-5-default\" (UID: \"7a67df37-889e-4b45-949b-ceaca5ac719e\") " pod="openstack/mariadb-client-5-default" Sep 29 23:47:41 crc kubenswrapper[4922]: I0929 23:47:41.875835 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 29 23:47:42 crc kubenswrapper[4922]: I0929 23:47:42.239988 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 29 23:47:43 crc kubenswrapper[4922]: I0929 23:47:43.127174 4922 generic.go:334] "Generic (PLEG): container finished" podID="7a67df37-889e-4b45-949b-ceaca5ac719e" containerID="7c1d7c6ffa2f9b5d5467b2c3de2ae243f90ea76e013540fb8ed8e64eef16d4de" exitCode=0 Sep 29 23:47:43 crc kubenswrapper[4922]: I0929 23:47:43.127386 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"7a67df37-889e-4b45-949b-ceaca5ac719e","Type":"ContainerDied","Data":"7c1d7c6ffa2f9b5d5467b2c3de2ae243f90ea76e013540fb8ed8e64eef16d4de"} Sep 29 23:47:43 crc kubenswrapper[4922]: I0929 23:47:43.127731 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"7a67df37-889e-4b45-949b-ceaca5ac719e","Type":"ContainerStarted","Data":"191b26dee06613c178bc0daf2b08061d9202ddda4e7aa96a1bf0e3b7e81cb3de"} Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.613487 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.639522 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_7a67df37-889e-4b45-949b-ceaca5ac719e/mariadb-client-5-default/0.log" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.670057 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.674637 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.739991 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgsmb\" (UniqueName: \"kubernetes.io/projected/7a67df37-889e-4b45-949b-ceaca5ac719e-kube-api-access-mgsmb\") pod \"7a67df37-889e-4b45-949b-ceaca5ac719e\" (UID: \"7a67df37-889e-4b45-949b-ceaca5ac719e\") " Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.748584 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a67df37-889e-4b45-949b-ceaca5ac719e-kube-api-access-mgsmb" (OuterVolumeSpecName: "kube-api-access-mgsmb") pod "7a67df37-889e-4b45-949b-ceaca5ac719e" (UID: "7a67df37-889e-4b45-949b-ceaca5ac719e"). InnerVolumeSpecName "kube-api-access-mgsmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.842251 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgsmb\" (UniqueName: \"kubernetes.io/projected/7a67df37-889e-4b45-949b-ceaca5ac719e-kube-api-access-mgsmb\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.870841 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Sep 29 23:47:44 crc kubenswrapper[4922]: E0929 23:47:44.871386 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a67df37-889e-4b45-949b-ceaca5ac719e" containerName="mariadb-client-5-default" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.871452 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a67df37-889e-4b45-949b-ceaca5ac719e" containerName="mariadb-client-5-default" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.871892 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a67df37-889e-4b45-949b-ceaca5ac719e" containerName="mariadb-client-5-default" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.873021 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 29 23:47:44 crc kubenswrapper[4922]: I0929 23:47:44.887669 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 29 23:47:45 crc kubenswrapper[4922]: I0929 23:47:45.047255 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nprw\" (UniqueName: \"kubernetes.io/projected/d066214d-2b27-4bd0-9fb0-0626c0ed7f6c-kube-api-access-5nprw\") pod \"mariadb-client-6-default\" (UID: \"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c\") " pod="openstack/mariadb-client-6-default" Sep 29 23:47:45 crc kubenswrapper[4922]: I0929 23:47:45.149340 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nprw\" (UniqueName: \"kubernetes.io/projected/d066214d-2b27-4bd0-9fb0-0626c0ed7f6c-kube-api-access-5nprw\") pod \"mariadb-client-6-default\" (UID: \"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c\") " pod="openstack/mariadb-client-6-default" Sep 29 23:47:45 crc kubenswrapper[4922]: I0929 23:47:45.164336 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="191b26dee06613c178bc0daf2b08061d9202ddda4e7aa96a1bf0e3b7e81cb3de" Sep 29 23:47:45 crc kubenswrapper[4922]: I0929 23:47:45.164452 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Sep 29 23:47:45 crc kubenswrapper[4922]: I0929 23:47:45.183849 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nprw\" (UniqueName: \"kubernetes.io/projected/d066214d-2b27-4bd0-9fb0-0626c0ed7f6c-kube-api-access-5nprw\") pod \"mariadb-client-6-default\" (UID: \"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c\") " pod="openstack/mariadb-client-6-default" Sep 29 23:47:45 crc kubenswrapper[4922]: I0929 23:47:45.204574 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 29 23:47:45 crc kubenswrapper[4922]: I0929 23:47:45.541159 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 29 23:47:45 crc kubenswrapper[4922]: W0929 23:47:45.543703 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd066214d_2b27_4bd0_9fb0_0626c0ed7f6c.slice/crio-c63941d2c46b277402a03d6995d5421040a38a3e913390f9a5bd85ae29d17609 WatchSource:0}: Error finding container c63941d2c46b277402a03d6995d5421040a38a3e913390f9a5bd85ae29d17609: Status 404 returned error can't find the container with id c63941d2c46b277402a03d6995d5421040a38a3e913390f9a5bd85ae29d17609 Sep 29 23:47:46 crc kubenswrapper[4922]: I0929 23:47:46.176283 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c","Type":"ContainerStarted","Data":"156d66aaa21afacfdfe14f360e674ed778e203c7d2a91d45fe6888cd733d0a18"} Sep 29 23:47:46 crc kubenswrapper[4922]: I0929 23:47:46.176885 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c","Type":"ContainerStarted","Data":"c63941d2c46b277402a03d6995d5421040a38a3e913390f9a5bd85ae29d17609"} Sep 29 23:47:46 crc kubenswrapper[4922]: I0929 23:47:46.201908 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=2.201884679 podStartE2EDuration="2.201884679s" podCreationTimestamp="2025-09-29 23:47:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:47:46.197734787 +0000 UTC m=+4870.508023650" watchObservedRunningTime="2025-09-29 23:47:46.201884679 +0000 UTC m=+4870.512173492" Sep 29 23:47:46 crc kubenswrapper[4922]: I0929 23:47:46.433154 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a67df37-889e-4b45-949b-ceaca5ac719e" path="/var/lib/kubelet/pods/7a67df37-889e-4b45-949b-ceaca5ac719e/volumes" Sep 29 23:47:47 crc kubenswrapper[4922]: I0929 23:47:47.188452 4922 generic.go:334] "Generic (PLEG): container finished" podID="d066214d-2b27-4bd0-9fb0-0626c0ed7f6c" containerID="156d66aaa21afacfdfe14f360e674ed778e203c7d2a91d45fe6888cd733d0a18" exitCode=0 Sep 29 23:47:47 crc kubenswrapper[4922]: I0929 23:47:47.188526 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c","Type":"ContainerDied","Data":"156d66aaa21afacfdfe14f360e674ed778e203c7d2a91d45fe6888cd733d0a18"} Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.653855 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.693609 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.698699 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.816276 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nprw\" (UniqueName: \"kubernetes.io/projected/d066214d-2b27-4bd0-9fb0-0626c0ed7f6c-kube-api-access-5nprw\") pod \"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c\" (UID: \"d066214d-2b27-4bd0-9fb0-0626c0ed7f6c\") " Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.822886 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d066214d-2b27-4bd0-9fb0-0626c0ed7f6c-kube-api-access-5nprw" (OuterVolumeSpecName: "kube-api-access-5nprw") pod "d066214d-2b27-4bd0-9fb0-0626c0ed7f6c" (UID: "d066214d-2b27-4bd0-9fb0-0626c0ed7f6c"). InnerVolumeSpecName "kube-api-access-5nprw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.898424 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Sep 29 23:47:48 crc kubenswrapper[4922]: E0929 23:47:48.898874 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d066214d-2b27-4bd0-9fb0-0626c0ed7f6c" containerName="mariadb-client-6-default" Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.898898 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d066214d-2b27-4bd0-9fb0-0626c0ed7f6c" containerName="mariadb-client-6-default" Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.899155 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d066214d-2b27-4bd0-9fb0-0626c0ed7f6c" containerName="mariadb-client-6-default" Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.899897 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.908042 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 29 23:47:48 crc kubenswrapper[4922]: I0929 23:47:48.919837 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nprw\" (UniqueName: \"kubernetes.io/projected/d066214d-2b27-4bd0-9fb0-0626c0ed7f6c-kube-api-access-5nprw\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:49 crc kubenswrapper[4922]: I0929 23:47:49.020803 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qvxl\" (UniqueName: \"kubernetes.io/projected/d78ea301-76aa-4f66-920e-4dbd57ca1761-kube-api-access-2qvxl\") pod \"mariadb-client-7-default\" (UID: \"d78ea301-76aa-4f66-920e-4dbd57ca1761\") " pod="openstack/mariadb-client-7-default" Sep 29 23:47:49 crc kubenswrapper[4922]: I0929 23:47:49.123126 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qvxl\" (UniqueName: \"kubernetes.io/projected/d78ea301-76aa-4f66-920e-4dbd57ca1761-kube-api-access-2qvxl\") pod \"mariadb-client-7-default\" (UID: \"d78ea301-76aa-4f66-920e-4dbd57ca1761\") " pod="openstack/mariadb-client-7-default" Sep 29 23:47:49 crc kubenswrapper[4922]: I0929 23:47:49.142695 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qvxl\" (UniqueName: \"kubernetes.io/projected/d78ea301-76aa-4f66-920e-4dbd57ca1761-kube-api-access-2qvxl\") pod \"mariadb-client-7-default\" (UID: \"d78ea301-76aa-4f66-920e-4dbd57ca1761\") " pod="openstack/mariadb-client-7-default" Sep 29 23:47:49 crc kubenswrapper[4922]: I0929 23:47:49.211318 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c63941d2c46b277402a03d6995d5421040a38a3e913390f9a5bd85ae29d17609" Sep 29 23:47:49 crc kubenswrapper[4922]: I0929 23:47:49.211439 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Sep 29 23:47:49 crc kubenswrapper[4922]: I0929 23:47:49.256611 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 29 23:47:49 crc kubenswrapper[4922]: I0929 23:47:49.606654 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 29 23:47:49 crc kubenswrapper[4922]: W0929 23:47:49.613679 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd78ea301_76aa_4f66_920e_4dbd57ca1761.slice/crio-c43f793d8d357f1bd93f9ddc96b93adfc72438433afc16cd5e108390bbedd9b8 WatchSource:0}: Error finding container c43f793d8d357f1bd93f9ddc96b93adfc72438433afc16cd5e108390bbedd9b8: Status 404 returned error can't find the container with id c43f793d8d357f1bd93f9ddc96b93adfc72438433afc16cd5e108390bbedd9b8 Sep 29 23:47:50 crc kubenswrapper[4922]: I0929 23:47:50.223441 4922 generic.go:334] "Generic (PLEG): container finished" podID="d78ea301-76aa-4f66-920e-4dbd57ca1761" containerID="bb2fbad2208fca6fa3f70088358f2fcdaed36eb3c4980e6ff6947290ef2f9fd2" exitCode=0 Sep 29 23:47:50 crc kubenswrapper[4922]: I0929 23:47:50.223494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"d78ea301-76aa-4f66-920e-4dbd57ca1761","Type":"ContainerDied","Data":"bb2fbad2208fca6fa3f70088358f2fcdaed36eb3c4980e6ff6947290ef2f9fd2"} Sep 29 23:47:50 crc kubenswrapper[4922]: I0929 23:47:50.223526 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"d78ea301-76aa-4f66-920e-4dbd57ca1761","Type":"ContainerStarted","Data":"c43f793d8d357f1bd93f9ddc96b93adfc72438433afc16cd5e108390bbedd9b8"} Sep 29 23:47:50 crc kubenswrapper[4922]: I0929 23:47:50.434383 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d066214d-2b27-4bd0-9fb0-0626c0ed7f6c" path="/var/lib/kubelet/pods/d066214d-2b27-4bd0-9fb0-0626c0ed7f6c/volumes" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.746286 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.772885 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_d78ea301-76aa-4f66-920e-4dbd57ca1761/mariadb-client-7-default/0.log" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.800713 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qvxl\" (UniqueName: \"kubernetes.io/projected/d78ea301-76aa-4f66-920e-4dbd57ca1761-kube-api-access-2qvxl\") pod \"d78ea301-76aa-4f66-920e-4dbd57ca1761\" (UID: \"d78ea301-76aa-4f66-920e-4dbd57ca1761\") " Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.812838 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d78ea301-76aa-4f66-920e-4dbd57ca1761-kube-api-access-2qvxl" (OuterVolumeSpecName: "kube-api-access-2qvxl") pod "d78ea301-76aa-4f66-920e-4dbd57ca1761" (UID: "d78ea301-76aa-4f66-920e-4dbd57ca1761"). InnerVolumeSpecName "kube-api-access-2qvxl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.816160 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.826240 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.840233 4922 scope.go:117] "RemoveContainer" containerID="b9919a7da1b8d3055262e0f45c29ec6337384e5add24e1ebc579e6b70f13b5c9" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.903201 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qvxl\" (UniqueName: \"kubernetes.io/projected/d78ea301-76aa-4f66-920e-4dbd57ca1761-kube-api-access-2qvxl\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.974769 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Sep 29 23:47:51 crc kubenswrapper[4922]: E0929 23:47:51.975806 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d78ea301-76aa-4f66-920e-4dbd57ca1761" containerName="mariadb-client-7-default" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.975841 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d78ea301-76aa-4f66-920e-4dbd57ca1761" containerName="mariadb-client-7-default" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.976176 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d78ea301-76aa-4f66-920e-4dbd57ca1761" containerName="mariadb-client-7-default" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.977347 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 29 23:47:51 crc kubenswrapper[4922]: I0929 23:47:51.996634 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.005218 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njncr\" (UniqueName: \"kubernetes.io/projected/9ffcad5c-9978-48d1-b721-f81ad527b13a-kube-api-access-njncr\") pod \"mariadb-client-2\" (UID: \"9ffcad5c-9978-48d1-b721-f81ad527b13a\") " pod="openstack/mariadb-client-2" Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.106747 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njncr\" (UniqueName: \"kubernetes.io/projected/9ffcad5c-9978-48d1-b721-f81ad527b13a-kube-api-access-njncr\") pod \"mariadb-client-2\" (UID: \"9ffcad5c-9978-48d1-b721-f81ad527b13a\") " pod="openstack/mariadb-client-2" Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.137260 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njncr\" (UniqueName: \"kubernetes.io/projected/9ffcad5c-9978-48d1-b721-f81ad527b13a-kube-api-access-njncr\") pod \"mariadb-client-2\" (UID: \"9ffcad5c-9978-48d1-b721-f81ad527b13a\") " pod="openstack/mariadb-client-2" Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.248541 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c43f793d8d357f1bd93f9ddc96b93adfc72438433afc16cd5e108390bbedd9b8" Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.248640 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.309949 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.448850 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d78ea301-76aa-4f66-920e-4dbd57ca1761" path="/var/lib/kubelet/pods/d78ea301-76aa-4f66-920e-4dbd57ca1761/volumes" Sep 29 23:47:52 crc kubenswrapper[4922]: I0929 23:47:52.879736 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Sep 29 23:47:52 crc kubenswrapper[4922]: W0929 23:47:52.897685 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ffcad5c_9978_48d1_b721_f81ad527b13a.slice/crio-1cca711d566b0c36636d9070b91de6c73f962e8b620c3fed52bb7a80216c13a3 WatchSource:0}: Error finding container 1cca711d566b0c36636d9070b91de6c73f962e8b620c3fed52bb7a80216c13a3: Status 404 returned error can't find the container with id 1cca711d566b0c36636d9070b91de6c73f962e8b620c3fed52bb7a80216c13a3 Sep 29 23:47:53 crc kubenswrapper[4922]: I0929 23:47:53.263369 4922 generic.go:334] "Generic (PLEG): container finished" podID="9ffcad5c-9978-48d1-b721-f81ad527b13a" containerID="22bb54e8b6b8c1045048b2f06696c872910b1ff91fb2b466c51a4289191254fd" exitCode=0 Sep 29 23:47:53 crc kubenswrapper[4922]: I0929 23:47:53.263468 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"9ffcad5c-9978-48d1-b721-f81ad527b13a","Type":"ContainerDied","Data":"22bb54e8b6b8c1045048b2f06696c872910b1ff91fb2b466c51a4289191254fd"} Sep 29 23:47:53 crc kubenswrapper[4922]: I0929 23:47:53.263540 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"9ffcad5c-9978-48d1-b721-f81ad527b13a","Type":"ContainerStarted","Data":"1cca711d566b0c36636d9070b91de6c73f962e8b620c3fed52bb7a80216c13a3"} Sep 29 23:47:54 crc kubenswrapper[4922]: I0929 23:47:54.742421 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 29 23:47:54 crc kubenswrapper[4922]: I0929 23:47:54.758419 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njncr\" (UniqueName: \"kubernetes.io/projected/9ffcad5c-9978-48d1-b721-f81ad527b13a-kube-api-access-njncr\") pod \"9ffcad5c-9978-48d1-b721-f81ad527b13a\" (UID: \"9ffcad5c-9978-48d1-b721-f81ad527b13a\") " Sep 29 23:47:54 crc kubenswrapper[4922]: I0929 23:47:54.767702 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_9ffcad5c-9978-48d1-b721-f81ad527b13a/mariadb-client-2/0.log" Sep 29 23:47:54 crc kubenswrapper[4922]: I0929 23:47:54.770542 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ffcad5c-9978-48d1-b721-f81ad527b13a-kube-api-access-njncr" (OuterVolumeSpecName: "kube-api-access-njncr") pod "9ffcad5c-9978-48d1-b721-f81ad527b13a" (UID: "9ffcad5c-9978-48d1-b721-f81ad527b13a"). InnerVolumeSpecName "kube-api-access-njncr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:47:54 crc kubenswrapper[4922]: I0929 23:47:54.804558 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Sep 29 23:47:54 crc kubenswrapper[4922]: I0929 23:47:54.809163 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Sep 29 23:47:54 crc kubenswrapper[4922]: I0929 23:47:54.868776 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njncr\" (UniqueName: \"kubernetes.io/projected/9ffcad5c-9978-48d1-b721-f81ad527b13a-kube-api-access-njncr\") on node \"crc\" DevicePath \"\"" Sep 29 23:47:55 crc kubenswrapper[4922]: I0929 23:47:55.295044 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cca711d566b0c36636d9070b91de6c73f962e8b620c3fed52bb7a80216c13a3" Sep 29 23:47:55 crc kubenswrapper[4922]: I0929 23:47:55.295148 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Sep 29 23:47:56 crc kubenswrapper[4922]: I0929 23:47:56.438303 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ffcad5c-9978-48d1-b721-f81ad527b13a" path="/var/lib/kubelet/pods/9ffcad5c-9978-48d1-b721-f81ad527b13a/volumes" Sep 29 23:49:58 crc kubenswrapper[4922]: I0929 23:49:58.912880 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:49:58 crc kubenswrapper[4922]: I0929 23:49:58.915529 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:50:28 crc kubenswrapper[4922]: I0929 23:50:28.912731 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:50:28 crc kubenswrapper[4922]: I0929 23:50:28.913372 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:50:58 crc kubenswrapper[4922]: I0929 23:50:58.913034 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:50:58 crc kubenswrapper[4922]: I0929 23:50:58.913710 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:50:58 crc 
kubenswrapper[4922]: I0929 23:50:58.913773 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:50:58 crc kubenswrapper[4922]: I0929 23:50:58.914699 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ff9f576c4f920051ee029d64618cf6e7e349c0b190d881ecf9094b87fe73dac7"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:50:58 crc kubenswrapper[4922]: I0929 23:50:58.914799 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://ff9f576c4f920051ee029d64618cf6e7e349c0b190d881ecf9094b87fe73dac7" gracePeriod=600 Sep 29 23:50:59 crc kubenswrapper[4922]: I0929 23:50:59.204645 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="ff9f576c4f920051ee029d64618cf6e7e349c0b190d881ecf9094b87fe73dac7" exitCode=0 Sep 29 23:50:59 crc kubenswrapper[4922]: I0929 23:50:59.204690 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"ff9f576c4f920051ee029d64618cf6e7e349c0b190d881ecf9094b87fe73dac7"} Sep 29 23:50:59 crc kubenswrapper[4922]: I0929 23:50:59.204720 4922 scope.go:117] "RemoveContainer" containerID="7b79276f581736e03c43d403fd3e364b97764eba106312268297b6074c9464ca" Sep 29 23:51:00 crc kubenswrapper[4922]: I0929 23:51:00.219710 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7"} Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.051153 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Sep 29 23:53:10 crc kubenswrapper[4922]: E0929 23:53:10.052141 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ffcad5c-9978-48d1-b721-f81ad527b13a" containerName="mariadb-client-2" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.052161 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ffcad5c-9978-48d1-b721-f81ad527b13a" containerName="mariadb-client-2" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.052411 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ffcad5c-9978-48d1-b721-f81ad527b13a" containerName="mariadb-client-2" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.053155 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.057455 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-nb56t" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.060756 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.211323 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\") pod \"mariadb-copy-data\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.211495 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vx6j\" (UniqueName: \"kubernetes.io/projected/0eaa79ab-916d-4190-a6cf-fc62fb86f89a-kube-api-access-7vx6j\") pod \"mariadb-copy-data\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.313474 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vx6j\" (UniqueName: \"kubernetes.io/projected/0eaa79ab-916d-4190-a6cf-fc62fb86f89a-kube-api-access-7vx6j\") pod \"mariadb-copy-data\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.313590 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\") pod \"mariadb-copy-data\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.316432 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.316508 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\") pod \"mariadb-copy-data\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5079c5ea43a8e44544d946f353ef8806bcd3b4f74f3709ff92521e7c30639943/globalmount\"" pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.354172 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vx6j\" (UniqueName: \"kubernetes.io/projected/0eaa79ab-916d-4190-a6cf-fc62fb86f89a-kube-api-access-7vx6j\") pod \"mariadb-copy-data\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.361138 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\") pod \"mariadb-copy-data\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.383123 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Sep 29 23:53:10 crc kubenswrapper[4922]: I0929 23:53:10.926339 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Sep 29 23:53:11 crc kubenswrapper[4922]: I0929 23:53:11.547915 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"0eaa79ab-916d-4190-a6cf-fc62fb86f89a","Type":"ContainerStarted","Data":"beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7"} Sep 29 23:53:11 crc kubenswrapper[4922]: I0929 23:53:11.548383 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"0eaa79ab-916d-4190-a6cf-fc62fb86f89a","Type":"ContainerStarted","Data":"dc09b6625b6e72f4bd495cac7861f139e54c5ed05aaadc1015212799976a89fb"} Sep 29 23:53:11 crc kubenswrapper[4922]: I0929 23:53:11.579222 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.579196284 podStartE2EDuration="2.579196284s" podCreationTimestamp="2025-09-29 23:53:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:53:11.566076591 +0000 UTC m=+5195.876365484" watchObservedRunningTime="2025-09-29 23:53:11.579196284 +0000 UTC m=+5195.889485137" Sep 29 23:53:13 crc kubenswrapper[4922]: I0929 23:53:13.605541 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:13 crc kubenswrapper[4922]: I0929 23:53:13.608060 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:13 crc kubenswrapper[4922]: I0929 23:53:13.619265 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:13 crc kubenswrapper[4922]: I0929 23:53:13.674823 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l42r7\" (UniqueName: \"kubernetes.io/projected/5f38cecf-c768-4e99-9cc1-b01129a3ad77-kube-api-access-l42r7\") pod \"mariadb-client\" (UID: \"5f38cecf-c768-4e99-9cc1-b01129a3ad77\") " pod="openstack/mariadb-client" Sep 29 23:53:13 crc kubenswrapper[4922]: I0929 23:53:13.776773 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l42r7\" (UniqueName: \"kubernetes.io/projected/5f38cecf-c768-4e99-9cc1-b01129a3ad77-kube-api-access-l42r7\") pod \"mariadb-client\" (UID: \"5f38cecf-c768-4e99-9cc1-b01129a3ad77\") " pod="openstack/mariadb-client" Sep 29 23:53:13 crc kubenswrapper[4922]: I0929 23:53:13.816016 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l42r7\" (UniqueName: \"kubernetes.io/projected/5f38cecf-c768-4e99-9cc1-b01129a3ad77-kube-api-access-l42r7\") pod \"mariadb-client\" (UID: \"5f38cecf-c768-4e99-9cc1-b01129a3ad77\") " pod="openstack/mariadb-client" Sep 29 23:53:13 crc kubenswrapper[4922]: I0929 23:53:13.991605 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:14 crc kubenswrapper[4922]: W0929 23:53:14.430422 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f38cecf_c768_4e99_9cc1_b01129a3ad77.slice/crio-ec6f16c3e773decbb9ac24237218183e0a33bfb3ae1a9383b86a4ac420c24f7b WatchSource:0}: Error finding container ec6f16c3e773decbb9ac24237218183e0a33bfb3ae1a9383b86a4ac420c24f7b: Status 404 returned error can't find the container with id ec6f16c3e773decbb9ac24237218183e0a33bfb3ae1a9383b86a4ac420c24f7b Sep 29 23:53:14 crc kubenswrapper[4922]: I0929 23:53:14.435983 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:14 crc kubenswrapper[4922]: I0929 23:53:14.576608 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5f38cecf-c768-4e99-9cc1-b01129a3ad77","Type":"ContainerStarted","Data":"ec6f16c3e773decbb9ac24237218183e0a33bfb3ae1a9383b86a4ac420c24f7b"} Sep 29 23:53:15 crc kubenswrapper[4922]: I0929 23:53:15.586886 4922 generic.go:334] "Generic (PLEG): container finished" podID="5f38cecf-c768-4e99-9cc1-b01129a3ad77" containerID="468c209441a7e43092fe1f4305e54769bb77e8ce1c01ff5b2f218dd404cb1d12" exitCode=0 Sep 29 23:53:15 crc kubenswrapper[4922]: I0929 23:53:15.586935 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"5f38cecf-c768-4e99-9cc1-b01129a3ad77","Type":"ContainerDied","Data":"468c209441a7e43092fe1f4305e54769bb77e8ce1c01ff5b2f218dd404cb1d12"} Sep 29 23:53:16 crc kubenswrapper[4922]: I0929 23:53:16.951725 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:16 crc kubenswrapper[4922]: I0929 23:53:16.973566 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_5f38cecf-c768-4e99-9cc1-b01129a3ad77/mariadb-client/0.log" Sep 29 23:53:16 crc kubenswrapper[4922]: I0929 23:53:16.997750 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.002622 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.047604 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l42r7\" (UniqueName: \"kubernetes.io/projected/5f38cecf-c768-4e99-9cc1-b01129a3ad77-kube-api-access-l42r7\") pod \"5f38cecf-c768-4e99-9cc1-b01129a3ad77\" (UID: \"5f38cecf-c768-4e99-9cc1-b01129a3ad77\") " Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.053638 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f38cecf-c768-4e99-9cc1-b01129a3ad77-kube-api-access-l42r7" (OuterVolumeSpecName: "kube-api-access-l42r7") pod "5f38cecf-c768-4e99-9cc1-b01129a3ad77" (UID: "5f38cecf-c768-4e99-9cc1-b01129a3ad77"). InnerVolumeSpecName "kube-api-access-l42r7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.140507 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:17 crc kubenswrapper[4922]: E0929 23:53:17.141280 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f38cecf-c768-4e99-9cc1-b01129a3ad77" containerName="mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.141301 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f38cecf-c768-4e99-9cc1-b01129a3ad77" containerName="mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.141803 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f38cecf-c768-4e99-9cc1-b01129a3ad77" containerName="mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.142695 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.159487 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.161673 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l42r7\" (UniqueName: \"kubernetes.io/projected/5f38cecf-c768-4e99-9cc1-b01129a3ad77-kube-api-access-l42r7\") on node \"crc\" DevicePath \"\"" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.263111 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m72z5\" (UniqueName: \"kubernetes.io/projected/ccf3927d-aaf5-438c-8419-4aa2d174c69d-kube-api-access-m72z5\") pod \"mariadb-client\" (UID: \"ccf3927d-aaf5-438c-8419-4aa2d174c69d\") " pod="openstack/mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.365188 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m72z5\" (UniqueName: \"kubernetes.io/projected/ccf3927d-aaf5-438c-8419-4aa2d174c69d-kube-api-access-m72z5\") pod \"mariadb-client\" (UID: \"ccf3927d-aaf5-438c-8419-4aa2d174c69d\") " pod="openstack/mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.402965 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m72z5\" (UniqueName: \"kubernetes.io/projected/ccf3927d-aaf5-438c-8419-4aa2d174c69d-kube-api-access-m72z5\") pod \"mariadb-client\" (UID: \"ccf3927d-aaf5-438c-8419-4aa2d174c69d\") " pod="openstack/mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.475935 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.606119 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec6f16c3e773decbb9ac24237218183e0a33bfb3ae1a9383b86a4ac420c24f7b" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.606244 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:17 crc kubenswrapper[4922]: I0929 23:53:17.626377 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="5f38cecf-c768-4e99-9cc1-b01129a3ad77" podUID="ccf3927d-aaf5-438c-8419-4aa2d174c69d" Sep 29 23:53:18 crc kubenswrapper[4922]: W0929 23:53:18.009845 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podccf3927d_aaf5_438c_8419_4aa2d174c69d.slice/crio-98a94ea4cd4777c12fdd50fda5f9ad8a83f18be28ecd18c3f38d7175929f86ab WatchSource:0}: Error finding container 98a94ea4cd4777c12fdd50fda5f9ad8a83f18be28ecd18c3f38d7175929f86ab: Status 404 returned error can't find the container with id 98a94ea4cd4777c12fdd50fda5f9ad8a83f18be28ecd18c3f38d7175929f86ab Sep 29 23:53:18 crc kubenswrapper[4922]: I0929 23:53:18.010774 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:18 crc kubenswrapper[4922]: I0929 23:53:18.431078 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f38cecf-c768-4e99-9cc1-b01129a3ad77" path="/var/lib/kubelet/pods/5f38cecf-c768-4e99-9cc1-b01129a3ad77/volumes" Sep 29 23:53:18 crc kubenswrapper[4922]: I0929 23:53:18.621088 4922 generic.go:334] "Generic (PLEG): container finished" podID="ccf3927d-aaf5-438c-8419-4aa2d174c69d" containerID="1759d7fcd3276a90c52798bb1a8ebf9ee28471db0aeed02e7010104ec33c86ec" exitCode=0 Sep 29 23:53:18 crc kubenswrapper[4922]: I0929 23:53:18.621153 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"ccf3927d-aaf5-438c-8419-4aa2d174c69d","Type":"ContainerDied","Data":"1759d7fcd3276a90c52798bb1a8ebf9ee28471db0aeed02e7010104ec33c86ec"} Sep 29 23:53:18 crc kubenswrapper[4922]: I0929 23:53:18.621654 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"ccf3927d-aaf5-438c-8419-4aa2d174c69d","Type":"ContainerStarted","Data":"98a94ea4cd4777c12fdd50fda5f9ad8a83f18be28ecd18c3f38d7175929f86ab"} Sep 29 23:53:19 crc kubenswrapper[4922]: I0929 23:53:19.989187 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.017788 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_ccf3927d-aaf5-438c-8419-4aa2d174c69d/mariadb-client/0.log" Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.045635 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.050477 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.110886 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m72z5\" (UniqueName: \"kubernetes.io/projected/ccf3927d-aaf5-438c-8419-4aa2d174c69d-kube-api-access-m72z5\") pod \"ccf3927d-aaf5-438c-8419-4aa2d174c69d\" (UID: \"ccf3927d-aaf5-438c-8419-4aa2d174c69d\") " Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.117065 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccf3927d-aaf5-438c-8419-4aa2d174c69d-kube-api-access-m72z5" (OuterVolumeSpecName: "kube-api-access-m72z5") pod "ccf3927d-aaf5-438c-8419-4aa2d174c69d" (UID: "ccf3927d-aaf5-438c-8419-4aa2d174c69d"). 
InnerVolumeSpecName "kube-api-access-m72z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.212943 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m72z5\" (UniqueName: \"kubernetes.io/projected/ccf3927d-aaf5-438c-8419-4aa2d174c69d-kube-api-access-m72z5\") on node \"crc\" DevicePath \"\"" Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.437101 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccf3927d-aaf5-438c-8419-4aa2d174c69d" path="/var/lib/kubelet/pods/ccf3927d-aaf5-438c-8419-4aa2d174c69d/volumes" Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.642078 4922 scope.go:117] "RemoveContainer" containerID="1759d7fcd3276a90c52798bb1a8ebf9ee28471db0aeed02e7010104ec33c86ec" Sep 29 23:53:20 crc kubenswrapper[4922]: I0929 23:53:20.642141 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Sep 29 23:53:28 crc kubenswrapper[4922]: I0929 23:53:28.912793 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:53:28 crc kubenswrapper[4922]: I0929 23:53:28.913579 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:53:52 crc kubenswrapper[4922]: I0929 23:53:52.090039 4922 scope.go:117] "RemoveContainer" containerID="7c1d7c6ffa2f9b5d5467b2c3de2ae243f90ea76e013540fb8ed8e64eef16d4de" Sep 29 23:53:52 crc kubenswrapper[4922]: I0929 23:53:52.111454 4922 scope.go:117] "RemoveContainer" containerID="156d66aaa21afacfdfe14f360e674ed778e203c7d2a91d45fe6888cd733d0a18" Sep 29 23:53:52 crc kubenswrapper[4922]: I0929 23:53:52.144630 4922 scope.go:117] "RemoveContainer" containerID="e9d5079334c9f729cba6e119789c518d9583c70876a4a79141d21b8b29d58cf2" Sep 29 23:53:52 crc kubenswrapper[4922]: I0929 23:53:52.171302 4922 scope.go:117] "RemoveContainer" containerID="94ce9a15e6aaf2abf2622537dcc880b3a68661e84e94966b8b67abddfb72c52f" Sep 29 23:53:52 crc kubenswrapper[4922]: I0929 23:53:52.200062 4922 scope.go:117] "RemoveContainer" containerID="ab94c340f8adaf3757f6cb0fa1eaf9a0a246c6167f173931d1d13c4140d87c20" Sep 29 23:53:52 crc kubenswrapper[4922]: I0929 23:53:52.241025 4922 scope.go:117] "RemoveContainer" containerID="5a3ae0a7424d21de8919f5a6d999bf3b283f9d3e181302bc9cf687b70790df5c" Sep 29 23:53:52 crc kubenswrapper[4922]: I0929 23:53:52.262377 4922 scope.go:117] "RemoveContainer" containerID="bb2fbad2208fca6fa3f70088358f2fcdaed36eb3c4980e6ff6947290ef2f9fd2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.800436 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 23:53:53 crc kubenswrapper[4922]: E0929 23:53:53.800940 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccf3927d-aaf5-438c-8419-4aa2d174c69d" containerName="mariadb-client" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.800963 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccf3927d-aaf5-438c-8419-4aa2d174c69d" containerName="mariadb-client" Sep 29 23:53:53 crc 
kubenswrapper[4922]: I0929 23:53:53.801231 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccf3927d-aaf5-438c-8419-4aa2d174c69d" containerName="mariadb-client" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.803172 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.808781 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.809382 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.809557 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-xzmvj" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.820227 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.823454 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.845506 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.861046 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.864594 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.873031 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.891454 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905321 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51103d86-5353-4cb5-97ab-c287700eb9ec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905370 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905406 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sxj8\" (UniqueName: \"kubernetes.io/projected/4f278e0d-047d-4387-8910-64bb296a8565-kube-api-access-6sxj8\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905431 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4f278e0d-047d-4387-8910-64bb296a8565-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " 
pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905448 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51103d86-5353-4cb5-97ab-c287700eb9ec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905527 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51103d86-5353-4cb5-97ab-c287700eb9ec-config\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905605 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-90b4ede2-1943-4183-a36b-196bced264cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-90b4ede2-1943-4183-a36b-196bced264cd\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905657 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f278e0d-047d-4387-8910-64bb296a8565-config\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905784 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51103d86-5353-4cb5-97ab-c287700eb9ec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905819 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f278e0d-047d-4387-8910-64bb296a8565-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905922 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4f278e0d-047d-4387-8910-64bb296a8565-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.905963 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghmg2\" (UniqueName: \"kubernetes.io/projected/51103d86-5353-4cb5-97ab-c287700eb9ec-kube-api-access-ghmg2\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:53 crc kubenswrapper[4922]: I0929 23:53:53.998139 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.008502 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.008908 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.008962 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-90b4ede2-1943-4183-a36b-196bced264cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-90b4ede2-1943-4183-a36b-196bced264cd\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.012352 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013065 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013344 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f278e0d-047d-4387-8910-64bb296a8565-config\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013470 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51103d86-5353-4cb5-97ab-c287700eb9ec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013513 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f278e0d-047d-4387-8910-64bb296a8565-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013585 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4f278e0d-047d-4387-8910-64bb296a8565-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013739 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghmg2\" (UniqueName: \"kubernetes.io/projected/51103d86-5353-4cb5-97ab-c287700eb9ec-kube-api-access-ghmg2\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013877 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51103d86-5353-4cb5-97ab-c287700eb9ec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.013971 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-6fp4c\" (UniqueName: \"kubernetes.io/projected/9069ed46-83fb-4a1e-9422-c87634b81112-kube-api-access-6fp4c\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014048 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9069ed46-83fb-4a1e-9422-c87634b81112-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014112 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014188 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sxj8\" (UniqueName: \"kubernetes.io/projected/4f278e0d-047d-4387-8910-64bb296a8565-kube-api-access-6sxj8\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9069ed46-83fb-4a1e-9422-c87634b81112-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014309 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9069ed46-83fb-4a1e-9422-c87634b81112-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014432 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4f278e0d-047d-4387-8910-64bb296a8565-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014500 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51103d86-5353-4cb5-97ab-c287700eb9ec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014569 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9069ed46-83fb-4a1e-9422-c87634b81112-config\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014636 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51103d86-5353-4cb5-97ab-c287700eb9ec-config\") pod \"ovsdbserver-nb-0\" (UID: 
\"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.014923 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4f278e0d-047d-4387-8910-64bb296a8565-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.015174 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f278e0d-047d-4387-8910-64bb296a8565-config\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.016463 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51103d86-5353-4cb5-97ab-c287700eb9ec-config\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.016897 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-jw6fn" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.017089 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/51103d86-5353-4cb5-97ab-c287700eb9ec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.017188 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/51103d86-5353-4cb5-97ab-c287700eb9ec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.017210 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4f278e0d-047d-4387-8910-64bb296a8565-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.017254 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.022666 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.023070 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/99153dc0693b0bd2d15fd292f00c5fc36a128c8692ac57fd2f996501eec21a91/globalmount\"" pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.025955 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.026000 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-90b4ede2-1943-4183-a36b-196bced264cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-90b4ede2-1943-4183-a36b-196bced264cd\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/669227044e108bb3b85b0a80606d5ce0c57055aa1c3e03b7227c3bd1d9685a4d/globalmount\"" pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.033603 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f278e0d-047d-4387-8910-64bb296a8565-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.041121 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51103d86-5353-4cb5-97ab-c287700eb9ec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.054045 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"] Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.054553 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sxj8\" (UniqueName: \"kubernetes.io/projected/4f278e0d-047d-4387-8910-64bb296a8565-kube-api-access-6sxj8\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.056787 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghmg2\" (UniqueName: \"kubernetes.io/projected/51103d86-5353-4cb5-97ab-c287700eb9ec-kube-api-access-ghmg2\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.057851 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.080066 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"] Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.083344 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.092160 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-90b4ede2-1943-4183-a36b-196bced264cd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-90b4ede2-1943-4183-a36b-196bced264cd\") pod \"ovsdbserver-nb-0\" (UID: \"51103d86-5353-4cb5-97ab-c287700eb9ec\") " pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.092385 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.100970 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.118828 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fp4c\" (UniqueName: \"kubernetes.io/projected/9069ed46-83fb-4a1e-9422-c87634b81112-kube-api-access-6fp4c\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.118939 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9069ed46-83fb-4a1e-9422-c87634b81112-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.118985 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9069ed46-83fb-4a1e-9422-c87634b81112-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.119019 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9069ed46-83fb-4a1e-9422-c87634b81112-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.119058 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9069ed46-83fb-4a1e-9422-c87634b81112-config\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.119094 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.120660 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9069ed46-83fb-4a1e-9422-c87634b81112-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.121443 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/9069ed46-83fb-4a1e-9422-c87634b81112-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.122165 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9069ed46-83fb-4a1e-9422-c87634b81112-config\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.124830 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.124935 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/9990be66e8089fc6cd5fe3b669291a04316496b127519724a61abd966d25b046/globalmount\"" pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.133093 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-047d5086-6ff3-4d1a-8d5d-a1b1ebb28d03\") pod \"ovsdbserver-nb-2\" (UID: \"4f278e0d-047d-4387-8910-64bb296a8565\") " pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.135557 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9069ed46-83fb-4a1e-9422-c87634b81112-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.137769 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fp4c\" (UniqueName: \"kubernetes.io/projected/9069ed46-83fb-4a1e-9422-c87634b81112-kube-api-access-6fp4c\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.163867 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.165287 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-808b22e4-16ea-410b-9656-1c1c9bb499c3\") pod \"ovsdbserver-nb-1\" (UID: \"9069ed46-83fb-4a1e-9422-c87634b81112\") " pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.180319 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.187869 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220674 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220726 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c3a574b-04b2-4672-9810-581f6ad101d5-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220745 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6c3a574b-04b2-4672-9810-581f6ad101d5-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220775 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98483887-cf9b-4246-ac12-4d38dae5acd0-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220808 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvhcw\" (UniqueName: \"kubernetes.io/projected/98483887-cf9b-4246-ac12-4d38dae5acd0-kube-api-access-qvhcw\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220830 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c3a574b-04b2-4672-9810-581f6ad101d5-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220892 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220908 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98483887-cf9b-4246-ac12-4d38dae5acd0-config\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220926 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2961879a-d786-4763-8353-554b884a741d-config\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " 
pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220943 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2961879a-d786-4763-8353-554b884a741d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220960 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2961879a-d786-4763-8353-554b884a741d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220977 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbvkk\" (UniqueName: \"kubernetes.io/projected/6c3a574b-04b2-4672-9810-581f6ad101d5-kube-api-access-fbvkk\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.220993 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c3a574b-04b2-4672-9810-581f6ad101d5-config\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.221010 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2961879a-d786-4763-8353-554b884a741d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.221042 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8gkr\" (UniqueName: \"kubernetes.io/projected/2961879a-d786-4763-8353-554b884a741d-kube-api-access-g8gkr\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.221062 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98483887-cf9b-4246-ac12-4d38dae5acd0-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.221082 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98483887-cf9b-4246-ac12-4d38dae5acd0-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.221109 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: 
I0929 23:53:54.322582 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98483887-cf9b-4246-ac12-4d38dae5acd0-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.322957 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.322987 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323008 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c3a574b-04b2-4672-9810-581f6ad101d5-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323025 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6c3a574b-04b2-4672-9810-581f6ad101d5-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323053 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98483887-cf9b-4246-ac12-4d38dae5acd0-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323078 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98483887-cf9b-4246-ac12-4d38dae5acd0-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323084 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvhcw\" (UniqueName: \"kubernetes.io/projected/98483887-cf9b-4246-ac12-4d38dae5acd0-kube-api-access-qvhcw\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323471 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c3a574b-04b2-4672-9810-581f6ad101d5-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323509 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323524 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98483887-cf9b-4246-ac12-4d38dae5acd0-config\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323541 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2961879a-d786-4763-8353-554b884a741d-config\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323556 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2961879a-d786-4763-8353-554b884a741d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323574 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2961879a-d786-4763-8353-554b884a741d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323591 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbvkk\" (UniqueName: \"kubernetes.io/projected/6c3a574b-04b2-4672-9810-581f6ad101d5-kube-api-access-fbvkk\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323609 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c3a574b-04b2-4672-9810-581f6ad101d5-config\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323624 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2961879a-d786-4763-8353-554b884a741d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323660 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8gkr\" (UniqueName: \"kubernetes.io/projected/2961879a-d786-4763-8353-554b884a741d-kube-api-access-g8gkr\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.323678 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98483887-cf9b-4246-ac12-4d38dae5acd0-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.324477 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98483887-cf9b-4246-ac12-4d38dae5acd0-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.325076 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6c3a574b-04b2-4672-9810-581f6ad101d5-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.325331 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c3a574b-04b2-4672-9810-581f6ad101d5-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.325456 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c3a574b-04b2-4672-9810-581f6ad101d5-config\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.326135 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2961879a-d786-4763-8353-554b884a741d-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.326533 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2961879a-d786-4763-8353-554b884a741d-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.326552 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2961879a-d786-4763-8353-554b884a741d-config\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.329264 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2961879a-d786-4763-8353-554b884a741d-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.330694 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98483887-cf9b-4246-ac12-4d38dae5acd0-config\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.331969 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98483887-cf9b-4246-ac12-4d38dae5acd0-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.332382 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/6c3a574b-04b2-4672-9810-581f6ad101d5-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.332486 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.332516 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b6033544d11db9e93895a174bd9a2bee17db5ef6c19a493b846103c7413a7a30/globalmount\"" pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.333208 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.333239 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f1f866b9a466e8978209ebf179cccd1634dda8f48b9b8b04cab9fef16bf07521/globalmount\"" pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.333291 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.333322 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/85e99b8e6792c464e95f590679b4a6337b4de0af95701159466eb1c4f3edceba/globalmount\"" pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.339871 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8gkr\" (UniqueName: \"kubernetes.io/projected/2961879a-d786-4763-8353-554b884a741d-kube-api-access-g8gkr\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.344279 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbvkk\" (UniqueName: \"kubernetes.io/projected/6c3a574b-04b2-4672-9810-581f6ad101d5-kube-api-access-fbvkk\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.347061 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvhcw\" (UniqueName: \"kubernetes.io/projected/98483887-cf9b-4246-ac12-4d38dae5acd0-kube-api-access-qvhcw\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.365898 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8f8ce5c0-a133-42da-970a-68ab50238b2a\") pod \"ovsdbserver-sb-2\" (UID: \"98483887-cf9b-4246-ac12-4d38dae5acd0\") " pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.366187 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-76c26498-a42e-4a98-8a1d-bfaffa277b5e\") pod \"ovsdbserver-sb-1\" (UID: \"6c3a574b-04b2-4672-9810-581f6ad101d5\") " pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.368339 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5070a1d2-3ba9-424b-a938-527cd8a02d0d\") pod \"ovsdbserver-sb-0\" (UID: \"2961879a-d786-4763-8353-554b884a741d\") " pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.563238 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.575049 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.592879 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.595865 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.712089 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 23:53:54 crc kubenswrapper[4922]: W0929 23:53:54.736141 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51103d86_5353_4cb5_97ab_c287700eb9ec.slice/crio-7f774150b5eb7393bbefbf5213553ebe4a5ae2723d024df1d016d53a5ad1bf27 WatchSource:0}: Error finding container 7f774150b5eb7393bbefbf5213553ebe4a5ae2723d024df1d016d53a5ad1bf27: Status 404 returned error can't find the container with id 7f774150b5eb7393bbefbf5213553ebe4a5ae2723d024df1d016d53a5ad1bf27 Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.820452 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Sep 29 23:53:54 crc kubenswrapper[4922]: W0929 23:53:54.849028 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9069ed46_83fb_4a1e_9422_c87634b81112.slice/crio-587acf1c3ebbf8bea725e0150aef308787e4858e48f7d499365da6c8dc553125 WatchSource:0}: Error finding container 587acf1c3ebbf8bea725e0150aef308787e4858e48f7d499365da6c8dc553125: Status 404 returned error can't find the container with id 587acf1c3ebbf8bea725e0150aef308787e4858e48f7d499365da6c8dc553125 Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.967924 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"4f278e0d-047d-4387-8910-64bb296a8565","Type":"ContainerStarted","Data":"689a6dd0fc9fb0066a526e118fd78cd04a8781b4010eefedf8be18c19b2805e4"} Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.967963 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"4f278e0d-047d-4387-8910-64bb296a8565","Type":"ContainerStarted","Data":"43cf4373197f6af3dd1a176df321908b4877146a0d4d935255a71600b31f0a3d"} Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.969352 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"9069ed46-83fb-4a1e-9422-c87634b81112","Type":"ContainerStarted","Data":"587acf1c3ebbf8bea725e0150aef308787e4858e48f7d499365da6c8dc553125"} Sep 29 23:53:54 crc kubenswrapper[4922]: I0929 23:53:54.970490 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"51103d86-5353-4cb5-97ab-c287700eb9ec","Type":"ContainerStarted","Data":"7f774150b5eb7393bbefbf5213553ebe4a5ae2723d024df1d016d53a5ad1bf27"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.138472 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Sep 29 23:53:55 crc kubenswrapper[4922]: W0929 23:53:55.148706 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c3a574b_04b2_4672_9810_581f6ad101d5.slice/crio-0187d626de1c64e93781459fa8731d913630ffd03b61f6820ef82435c02016e3 WatchSource:0}: Error finding container 0187d626de1c64e93781459fa8731d913630ffd03b61f6820ef82435c02016e3: Status 404 returned error can't find the container with id 0187d626de1c64e93781459fa8731d913630ffd03b61f6820ef82435c02016e3 Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.268696 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Sep 29 23:53:55 crc kubenswrapper[4922]: W0929 
23:53:55.271982 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98483887_cf9b_4246_ac12_4d38dae5acd0.slice/crio-2f85540802f64ef804ff4619cf558ce13fb3d1c2275c8f3528711936b64c2de5 WatchSource:0}: Error finding container 2f85540802f64ef804ff4619cf558ce13fb3d1c2275c8f3528711936b64c2de5: Status 404 returned error can't find the container with id 2f85540802f64ef804ff4619cf558ce13fb3d1c2275c8f3528711936b64c2de5 Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.811554 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 23:53:55 crc kubenswrapper[4922]: W0929 23:53:55.822012 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2961879a_d786_4763_8353_554b884a741d.slice/crio-628ccc98a095e45a1d26d42075aef7f4912b9e845f6972ea8fda65c4d76f5a3a WatchSource:0}: Error finding container 628ccc98a095e45a1d26d42075aef7f4912b9e845f6972ea8fda65c4d76f5a3a: Status 404 returned error can't find the container with id 628ccc98a095e45a1d26d42075aef7f4912b9e845f6972ea8fda65c4d76f5a3a Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.982251 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"98483887-cf9b-4246-ac12-4d38dae5acd0","Type":"ContainerStarted","Data":"13b02232593e21fdb8c5fc07fbd5ed4c256aef554cb3bd2f0fd3f2f03710b6aa"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.982321 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"98483887-cf9b-4246-ac12-4d38dae5acd0","Type":"ContainerStarted","Data":"19c32b2e6ca8082a02408eb9098987582aea499f1397da1c75ec4925026e88cb"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.982342 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"98483887-cf9b-4246-ac12-4d38dae5acd0","Type":"ContainerStarted","Data":"2f85540802f64ef804ff4619cf558ce13fb3d1c2275c8f3528711936b64c2de5"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.988678 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"9069ed46-83fb-4a1e-9422-c87634b81112","Type":"ContainerStarted","Data":"c275bddc8cc08f8a68d5e87f7ff7cdaf135bf52bb61043535797b834f0d693b9"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.988981 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"9069ed46-83fb-4a1e-9422-c87634b81112","Type":"ContainerStarted","Data":"0181191f2f5b65648833402cbb08b489bdd01e8e5cd2ee5732f36a029aed9869"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.991876 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2961879a-d786-4763-8353-554b884a741d","Type":"ContainerStarted","Data":"628ccc98a095e45a1d26d42075aef7f4912b9e845f6972ea8fda65c4d76f5a3a"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.994741 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"51103d86-5353-4cb5-97ab-c287700eb9ec","Type":"ContainerStarted","Data":"8ae91610ef2f97d92ece2af38d1700cfa5d04c0d3f150295add8ff44ab93961a"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.994801 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"51103d86-5353-4cb5-97ab-c287700eb9ec","Type":"ContainerStarted","Data":"9a2190e98acc2a3d1a6e6fa30d390db3ec652bd1601676ff90bd71c9352e184d"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.999781 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"6c3a574b-04b2-4672-9810-581f6ad101d5","Type":"ContainerStarted","Data":"fd22b96d38a64aede5be577471337060483f655afea684c6ab8daaba9cb29f48"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.999844 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"6c3a574b-04b2-4672-9810-581f6ad101d5","Type":"ContainerStarted","Data":"ef0e70bb927451bc6410a22a48fad1abe0a2f917e1c514daaf6d8caf8d6c760a"} Sep 29 23:53:55 crc kubenswrapper[4922]: I0929 23:53:55.999873 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"6c3a574b-04b2-4672-9810-581f6ad101d5","Type":"ContainerStarted","Data":"0187d626de1c64e93781459fa8731d913630ffd03b61f6820ef82435c02016e3"} Sep 29 23:53:56 crc kubenswrapper[4922]: I0929 23:53:56.002376 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"4f278e0d-047d-4387-8910-64bb296a8565","Type":"ContainerStarted","Data":"6d0ce3d2a53cfa072460796c4e9fef4a2933d21f988e4fc1a744a3fa1d404e75"} Sep 29 23:53:56 crc kubenswrapper[4922]: I0929 23:53:56.023134 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.023104724 podStartE2EDuration="4.023104724s" podCreationTimestamp="2025-09-29 23:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:53:56.017212799 +0000 UTC m=+5240.327501642" watchObservedRunningTime="2025-09-29 23:53:56.023104724 +0000 UTC m=+5240.333393577" Sep 29 23:53:56 crc kubenswrapper[4922]: I0929 23:53:56.043351 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.043325181 podStartE2EDuration="4.043325181s" podCreationTimestamp="2025-09-29 23:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:53:56.037063417 +0000 UTC m=+5240.347352270" watchObservedRunningTime="2025-09-29 23:53:56.043325181 +0000 UTC m=+5240.353614034" Sep 29 23:53:56 crc kubenswrapper[4922]: I0929 23:53:56.065329 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=4.065303092 podStartE2EDuration="4.065303092s" podCreationTimestamp="2025-09-29 23:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:53:56.053245635 +0000 UTC m=+5240.363534448" watchObservedRunningTime="2025-09-29 23:53:56.065303092 +0000 UTC m=+5240.375591945" Sep 29 23:53:56 crc kubenswrapper[4922]: I0929 23:53:56.079149 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=4.079134083 podStartE2EDuration="4.079134083s" podCreationTimestamp="2025-09-29 23:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:53:56.069312861 +0000 UTC m=+5240.379601714" watchObservedRunningTime="2025-09-29 
23:53:56.079134083 +0000 UTC m=+5240.389422906" Sep 29 23:53:56 crc kubenswrapper[4922]: I0929 23:53:56.110200 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.110169407 podStartE2EDuration="4.110169407s" podCreationTimestamp="2025-09-29 23:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:53:56.098552411 +0000 UTC m=+5240.408841234" watchObservedRunningTime="2025-09-29 23:53:56.110169407 +0000 UTC m=+5240.420458260" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.018688 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2961879a-d786-4763-8353-554b884a741d","Type":"ContainerStarted","Data":"3621bc0d4c8f2007eb6171bf0ea2d103646318076de3c455707abc2ab5cf515b"} Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.019257 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"2961879a-d786-4763-8353-554b884a741d","Type":"ContainerStarted","Data":"51f43bd88985cff89d692a9d1dc8e86383230aa6556d25a8518af5bac9a4ef64"} Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.058741 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.058710614 podStartE2EDuration="5.058710614s" podCreationTimestamp="2025-09-29 23:53:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:53:57.050864081 +0000 UTC m=+5241.361152924" watchObservedRunningTime="2025-09-29 23:53:57.058710614 +0000 UTC m=+5241.368999467" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.165092 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.181661 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.188605 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.248054 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.576269 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.594050 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:57 crc kubenswrapper[4922]: I0929 23:53:57.597312 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:58 crc kubenswrapper[4922]: I0929 23:53:58.029427 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:58 crc kubenswrapper[4922]: I0929 23:53:58.912905 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:53:58 crc kubenswrapper[4922]: I0929 23:53:58.913428 
4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.108739 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.163987 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.190291 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.412361 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58b4847c5-smncx"] Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.414409 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.417116 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.427173 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58b4847c5-smncx"] Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.519380 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-ovsdbserver-nb\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.519474 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8f2s\" (UniqueName: \"kubernetes.io/projected/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-kube-api-access-t8f2s\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.519629 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-dns-svc\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.519692 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-config\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.575895 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.594085 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.596708 4922 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.621097 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8f2s\" (UniqueName: \"kubernetes.io/projected/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-kube-api-access-t8f2s\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.621167 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-dns-svc\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.621195 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-config\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.622292 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-dns-svc\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.622419 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-ovsdbserver-nb\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.623102 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-config\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.621272 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-ovsdbserver-nb\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.640570 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8f2s\" (UniqueName: \"kubernetes.io/projected/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-kube-api-access-t8f2s\") pod \"dnsmasq-dns-58b4847c5-smncx\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:53:59 crc kubenswrapper[4922]: I0929 23:53:59.746190 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.227786 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58b4847c5-smncx"] Sep 29 23:54:00 crc kubenswrapper[4922]: W0929 23:54:00.236384 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a7b6e6f_ecb1_4426_92d4_d6b953a0cf5e.slice/crio-f0cf4356ad4dbfbcc0b39125d303f036ced5a38ccadd6dab21e8615f8464b581 WatchSource:0}: Error finding container f0cf4356ad4dbfbcc0b39125d303f036ced5a38ccadd6dab21e8615f8464b581: Status 404 returned error can't find the container with id f0cf4356ad4dbfbcc0b39125d303f036ced5a38ccadd6dab21e8615f8464b581 Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.238200 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.272295 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.317161 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.331124 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.642658 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.649656 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.657470 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.715666 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.733100 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.910086 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58b4847c5-smncx"] Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.929159 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cf678f797-zn5st"] Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.930598 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.932305 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 23:54:00 crc kubenswrapper[4922]: I0929 23:54:00.943380 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cf678f797-zn5st"] Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.047464 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slbs6\" (UniqueName: \"kubernetes.io/projected/43634687-ed85-4d66-807e-77c40c3d4e6a-kube-api-access-slbs6\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.047510 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-config\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.047544 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-sb\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.047569 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-nb\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.047884 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-dns-svc\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.052034 4922 generic.go:334] "Generic (PLEG): container finished" podID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerID="872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1" exitCode=0 Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.052179 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b4847c5-smncx" event={"ID":"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e","Type":"ContainerDied","Data":"872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1"} Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.052221 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b4847c5-smncx" event={"ID":"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e","Type":"ContainerStarted","Data":"f0cf4356ad4dbfbcc0b39125d303f036ced5a38ccadd6dab21e8615f8464b581"} Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.103629 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.150314 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-dns-svc\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.150827 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slbs6\" (UniqueName: \"kubernetes.io/projected/43634687-ed85-4d66-807e-77c40c3d4e6a-kube-api-access-slbs6\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.150878 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-config\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.150948 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-sb\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.150985 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-nb\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.151597 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-dns-svc\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.152229 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-sb\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.152597 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-config\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.152634 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-nb\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.173777 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slbs6\" (UniqueName: 
\"kubernetes.io/projected/43634687-ed85-4d66-807e-77c40c3d4e6a-kube-api-access-slbs6\") pod \"dnsmasq-dns-7cf678f797-zn5st\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.246526 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:01 crc kubenswrapper[4922]: I0929 23:54:01.692591 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cf678f797-zn5st"] Sep 29 23:54:01 crc kubenswrapper[4922]: W0929 23:54:01.698538 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43634687_ed85_4d66_807e_77c40c3d4e6a.slice/crio-227e54c1409c9c3a1d68d48d4a439121632e5d8e9776115644122aa6bc240917 WatchSource:0}: Error finding container 227e54c1409c9c3a1d68d48d4a439121632e5d8e9776115644122aa6bc240917: Status 404 returned error can't find the container with id 227e54c1409c9c3a1d68d48d4a439121632e5d8e9776115644122aa6bc240917 Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.063809 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b4847c5-smncx" event={"ID":"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e","Type":"ContainerStarted","Data":"27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b"} Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.063919 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58b4847c5-smncx" podUID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerName="dnsmasq-dns" containerID="cri-o://27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b" gracePeriod=10 Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.064140 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.066412 4922 generic.go:334] "Generic (PLEG): container finished" podID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerID="5bf7451e171a77d23957f30eb754aa56283baa52b88596adbf48cabbe70ef135" exitCode=0 Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.066494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" event={"ID":"43634687-ed85-4d66-807e-77c40c3d4e6a","Type":"ContainerDied","Data":"5bf7451e171a77d23957f30eb754aa56283baa52b88596adbf48cabbe70ef135"} Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.066569 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" event={"ID":"43634687-ed85-4d66-807e-77c40c3d4e6a","Type":"ContainerStarted","Data":"227e54c1409c9c3a1d68d48d4a439121632e5d8e9776115644122aa6bc240917"} Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.094125 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58b4847c5-smncx" podStartSLOduration=3.094106548 podStartE2EDuration="3.094106548s" podCreationTimestamp="2025-09-29 23:53:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:02.081890778 +0000 UTC m=+5246.392179601" watchObservedRunningTime="2025-09-29 23:54:02.094106548 +0000 UTC m=+5246.404395371" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.521323 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.677442 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-ovsdbserver-nb\") pod \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.677807 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8f2s\" (UniqueName: \"kubernetes.io/projected/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-kube-api-access-t8f2s\") pod \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.677849 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-dns-svc\") pod \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.677945 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-config\") pod \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\" (UID: \"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e\") " Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.683666 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-kube-api-access-t8f2s" (OuterVolumeSpecName: "kube-api-access-t8f2s") pod "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" (UID: "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e"). InnerVolumeSpecName "kube-api-access-t8f2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.727373 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" (UID: "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.734883 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-config" (OuterVolumeSpecName: "config") pod "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" (UID: "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.752700 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" (UID: "8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.780065 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.780095 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.780135 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8f2s\" (UniqueName: \"kubernetes.io/projected/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-kube-api-access-t8f2s\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:02 crc kubenswrapper[4922]: I0929 23:54:02.780148 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.078365 4922 generic.go:334] "Generic (PLEG): container finished" podID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerID="27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b" exitCode=0 Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.078475 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b4847c5-smncx" event={"ID":"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e","Type":"ContainerDied","Data":"27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b"} Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.078515 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58b4847c5-smncx" event={"ID":"8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e","Type":"ContainerDied","Data":"f0cf4356ad4dbfbcc0b39125d303f036ced5a38ccadd6dab21e8615f8464b581"} Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.078540 4922 scope.go:117] "RemoveContainer" containerID="27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.078697 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58b4847c5-smncx" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.085581 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" event={"ID":"43634687-ed85-4d66-807e-77c40c3d4e6a","Type":"ContainerStarted","Data":"ee9b371d6d9a4d3c4432025412c08ef62f58ca0857c33e6f25052f1752ce8b77"} Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.086315 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.114225 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" podStartSLOduration=3.114204496 podStartE2EDuration="3.114204496s" podCreationTimestamp="2025-09-29 23:54:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:03.114115514 +0000 UTC m=+5247.424404337" watchObservedRunningTime="2025-09-29 23:54:03.114204496 +0000 UTC m=+5247.424493329" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.115558 4922 scope.go:117] "RemoveContainer" containerID="872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.161224 4922 scope.go:117] "RemoveContainer" containerID="27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b" Sep 29 23:54:03 crc kubenswrapper[4922]: E0929 23:54:03.161876 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b\": container with ID starting with 27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b not found: ID does not exist" containerID="27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.161929 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b"} err="failed to get container status \"27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b\": rpc error: code = NotFound desc = could not find container \"27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b\": container with ID starting with 27708a141492c5effa01525d62d311f0fe3a4fe12b5ea4b2b07f2772d4634d3b not found: ID does not exist" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.161961 4922 scope.go:117] "RemoveContainer" containerID="872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1" Sep 29 23:54:03 crc kubenswrapper[4922]: E0929 23:54:03.162236 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1\": container with ID starting with 872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1 not found: ID does not exist" containerID="872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.162265 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1"} err="failed to get container status \"872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1\": rpc error: code = NotFound 
desc = could not find container \"872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1\": container with ID starting with 872b11829ca597286e1b97fee0d71c699f8a7bc41dc2eafa0e9af9d2d9c3cbe1 not found: ID does not exist" Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.164904 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58b4847c5-smncx"] Sep 29 23:54:03 crc kubenswrapper[4922]: I0929 23:54:03.175639 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58b4847c5-smncx"] Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.019794 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"] Sep 29 23:54:04 crc kubenswrapper[4922]: E0929 23:54:04.020653 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerName="init" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.020674 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerName="init" Sep 29 23:54:04 crc kubenswrapper[4922]: E0929 23:54:04.020702 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerName="dnsmasq-dns" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.020714 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerName="dnsmasq-dns" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.021042 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" containerName="dnsmasq-dns" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.021969 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.024223 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.028378 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.203923 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcp5l\" (UniqueName: \"kubernetes.io/projected/ec894f89-6b62-4dee-80df-c4ed29e9d117-kube-api-access-rcp5l\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.204024 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.204594 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/ec894f89-6b62-4dee-80df-c4ed29e9d117-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.306204 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: 
\"kubernetes.io/secret/ec894f89-6b62-4dee-80df-c4ed29e9d117-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.306299 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcp5l\" (UniqueName: \"kubernetes.io/projected/ec894f89-6b62-4dee-80df-c4ed29e9d117-kube-api-access-rcp5l\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.306344 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.310672 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.310720 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/88757547cd160bc13cf0df27e7d0b96cd5a43fb9e3427c0c5406b01e9748eb69/globalmount\"" pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.311041 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/ec894f89-6b62-4dee-80df-c4ed29e9d117-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.341647 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcp5l\" (UniqueName: \"kubernetes.io/projected/ec894f89-6b62-4dee-80df-c4ed29e9d117-kube-api-access-rcp5l\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.365786 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\") pod \"ovn-copy-data\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " pod="openstack/ovn-copy-data" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.431222 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e" path="/var/lib/kubelet/pods/8a7b6e6f-ecb1-4426-92d4-d6b953a0cf5e/volumes" Sep 29 23:54:04 crc kubenswrapper[4922]: I0929 23:54:04.643734 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Sep 29 23:54:05 crc kubenswrapper[4922]: W0929 23:54:05.010768 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec894f89_6b62_4dee_80df_c4ed29e9d117.slice/crio-3e045e483e590adc65f0cdd64e179b739550a4b8b85d7ff96b5d95dbae7eab37 WatchSource:0}: Error finding container 3e045e483e590adc65f0cdd64e179b739550a4b8b85d7ff96b5d95dbae7eab37: Status 404 returned error can't find the container with id 3e045e483e590adc65f0cdd64e179b739550a4b8b85d7ff96b5d95dbae7eab37 Sep 29 23:54:05 crc kubenswrapper[4922]: I0929 23:54:05.010932 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"] Sep 29 23:54:05 crc kubenswrapper[4922]: I0929 23:54:05.107192 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"ec894f89-6b62-4dee-80df-c4ed29e9d117","Type":"ContainerStarted","Data":"3e045e483e590adc65f0cdd64e179b739550a4b8b85d7ff96b5d95dbae7eab37"} Sep 29 23:54:06 crc kubenswrapper[4922]: I0929 23:54:06.121830 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"ec894f89-6b62-4dee-80df-c4ed29e9d117","Type":"ContainerStarted","Data":"86ad15efac297783eeab263e09ee7579ee7da29b56a86ceb84e37ded3d0a3156"} Sep 29 23:54:06 crc kubenswrapper[4922]: I0929 23:54:06.146423 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=4.146380066 podStartE2EDuration="4.146380066s" podCreationTimestamp="2025-09-29 23:54:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:06.142626164 +0000 UTC m=+5250.452915007" watchObservedRunningTime="2025-09-29 23:54:06.146380066 +0000 UTC m=+5250.456668889" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.249117 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.319746 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-7hjcz"] Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.320596 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" containerName="dnsmasq-dns" containerID="cri-o://57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75" gracePeriod=10 Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.795927 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.797916 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.801047 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.801325 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.802671 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-jjgf6" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.819039 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.834023 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.840521 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.840731 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.840794 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-config\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.840868 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pq4r\" (UniqueName: \"kubernetes.io/projected/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-kube-api-access-8pq4r\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.840893 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-scripts\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.941712 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q75jk\" (UniqueName: \"kubernetes.io/projected/714fe763-a262-451b-87c5-a2bf6759aee8-kube-api-access-q75jk\") pod \"714fe763-a262-451b-87c5-a2bf6759aee8\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.941848 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-config\") pod \"714fe763-a262-451b-87c5-a2bf6759aee8\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.941947 4922 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-dns-svc\") pod \"714fe763-a262-451b-87c5-a2bf6759aee8\" (UID: \"714fe763-a262-451b-87c5-a2bf6759aee8\") " Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.942109 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pq4r\" (UniqueName: \"kubernetes.io/projected/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-kube-api-access-8pq4r\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.942140 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-scripts\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.942239 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.942318 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.942345 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-config\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.943373 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.943512 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-scripts\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.944245 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-config\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.947653 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/714fe763-a262-451b-87c5-a2bf6759aee8-kube-api-access-q75jk" (OuterVolumeSpecName: "kube-api-access-q75jk") pod "714fe763-a262-451b-87c5-a2bf6759aee8" (UID: "714fe763-a262-451b-87c5-a2bf6759aee8"). InnerVolumeSpecName "kube-api-access-q75jk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.949382 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.967055 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pq4r\" (UniqueName: \"kubernetes.io/projected/966a79f9-9523-4d1e-a78b-af8bc4b8e51b-kube-api-access-8pq4r\") pod \"ovn-northd-0\" (UID: \"966a79f9-9523-4d1e-a78b-af8bc4b8e51b\") " pod="openstack/ovn-northd-0" Sep 29 23:54:11 crc kubenswrapper[4922]: I0929 23:54:11.999482 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "714fe763-a262-451b-87c5-a2bf6759aee8" (UID: "714fe763-a262-451b-87c5-a2bf6759aee8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.015115 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-config" (OuterVolumeSpecName: "config") pod "714fe763-a262-451b-87c5-a2bf6759aee8" (UID: "714fe763-a262-451b-87c5-a2bf6759aee8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.043840 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.043895 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q75jk\" (UniqueName: \"kubernetes.io/projected/714fe763-a262-451b-87c5-a2bf6759aee8-kube-api-access-q75jk\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.043909 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/714fe763-a262-451b-87c5-a2bf6759aee8-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.151611 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.198983 4922 generic.go:334] "Generic (PLEG): container finished" podID="714fe763-a262-451b-87c5-a2bf6759aee8" containerID="57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75" exitCode=0 Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.199033 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" event={"ID":"714fe763-a262-451b-87c5-a2bf6759aee8","Type":"ContainerDied","Data":"57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75"} Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.199066 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" event={"ID":"714fe763-a262-451b-87c5-a2bf6759aee8","Type":"ContainerDied","Data":"e1fe204ea3b6fbc85cce9e06289652928ab60b13b6024341a0be245ebf668f30"} Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.199086 4922 scope.go:117] "RemoveContainer" containerID="57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.199214 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.224033 4922 scope.go:117] "RemoveContainer" containerID="b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.244081 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-7hjcz"] Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.250046 4922 scope.go:117] "RemoveContainer" containerID="57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75" Sep 29 23:54:12 crc kubenswrapper[4922]: E0929 23:54:12.250559 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75\": container with ID starting with 57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75 not found: ID does not exist" containerID="57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.250605 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75"} err="failed to get container status \"57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75\": rpc error: code = NotFound desc = could not find container \"57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75\": container with ID starting with 57b9425ce85996b9c5965218c7d3352ce4f2c433ca99a8f201907ca8b39ace75 not found: ID does not exist" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.250632 4922 scope.go:117] "RemoveContainer" containerID="b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b" Sep 29 23:54:12 crc kubenswrapper[4922]: E0929 23:54:12.251030 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b\": container with ID starting with b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b not found: ID does not exist" containerID="b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b" Sep 29 23:54:12 crc 
kubenswrapper[4922]: I0929 23:54:12.251078 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b"} err="failed to get container status \"b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b\": rpc error: code = NotFound desc = could not find container \"b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b\": container with ID starting with b99a9eaf267226efbbb45596906844c982b8374f2b12a69b5dfd3c60148cb56b not found: ID does not exist" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.254641 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b7946d7b9-7hjcz"] Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.435110 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" path="/var/lib/kubelet/pods/714fe763-a262-451b-87c5-a2bf6759aee8/volumes" Sep 29 23:54:12 crc kubenswrapper[4922]: I0929 23:54:12.643084 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 23:54:12 crc kubenswrapper[4922]: W0929 23:54:12.646833 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod966a79f9_9523_4d1e_a78b_af8bc4b8e51b.slice/crio-2986c80f646862762437b0606acd5f355501ad8a71d60c9c83e72fc0d0044f6c WatchSource:0}: Error finding container 2986c80f646862762437b0606acd5f355501ad8a71d60c9c83e72fc0d0044f6c: Status 404 returned error can't find the container with id 2986c80f646862762437b0606acd5f355501ad8a71d60c9c83e72fc0d0044f6c Sep 29 23:54:13 crc kubenswrapper[4922]: I0929 23:54:13.210491 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"966a79f9-9523-4d1e-a78b-af8bc4b8e51b","Type":"ContainerStarted","Data":"b030950aa696c018f92462f84befd6c896e5e165acda083414b28db9248adb07"} Sep 29 23:54:13 crc kubenswrapper[4922]: I0929 23:54:13.210908 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"966a79f9-9523-4d1e-a78b-af8bc4b8e51b","Type":"ContainerStarted","Data":"a378e9f0f81d170857adc3e58876fceaf84a13a1fe02d882e42371a2f87761fe"} Sep 29 23:54:13 crc kubenswrapper[4922]: I0929 23:54:13.210935 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"966a79f9-9523-4d1e-a78b-af8bc4b8e51b","Type":"ContainerStarted","Data":"2986c80f646862762437b0606acd5f355501ad8a71d60c9c83e72fc0d0044f6c"} Sep 29 23:54:13 crc kubenswrapper[4922]: I0929 23:54:13.211434 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 29 23:54:13 crc kubenswrapper[4922]: I0929 23:54:13.246258 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.246230353 podStartE2EDuration="2.246230353s" podCreationTimestamp="2025-09-29 23:54:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:13.233330166 +0000 UTC m=+5257.543619019" watchObservedRunningTime="2025-09-29 23:54:13.246230353 +0000 UTC m=+5257.556519196" Sep 29 23:54:16 crc kubenswrapper[4922]: I0929 23:54:16.733063 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b7946d7b9-7hjcz" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" containerName="dnsmasq-dns" probeResult="failure" 
output="dial tcp 10.217.0.240:5353: i/o timeout" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.606730 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-m57px"] Sep 29 23:54:17 crc kubenswrapper[4922]: E0929 23:54:17.607198 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" containerName="dnsmasq-dns" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.607232 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" containerName="dnsmasq-dns" Sep 29 23:54:17 crc kubenswrapper[4922]: E0929 23:54:17.607273 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" containerName="init" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.607285 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" containerName="init" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.607556 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="714fe763-a262-451b-87c5-a2bf6759aee8" containerName="dnsmasq-dns" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.608377 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-m57px" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.622692 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-m57px"] Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.679332 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjjp7\" (UniqueName: \"kubernetes.io/projected/ad146be8-a637-4ae4-b353-4cb9e36b4d74-kube-api-access-pjjp7\") pod \"keystone-db-create-m57px\" (UID: \"ad146be8-a637-4ae4-b353-4cb9e36b4d74\") " pod="openstack/keystone-db-create-m57px" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.780891 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjjp7\" (UniqueName: \"kubernetes.io/projected/ad146be8-a637-4ae4-b353-4cb9e36b4d74-kube-api-access-pjjp7\") pod \"keystone-db-create-m57px\" (UID: \"ad146be8-a637-4ae4-b353-4cb9e36b4d74\") " pod="openstack/keystone-db-create-m57px" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.800499 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjjp7\" (UniqueName: \"kubernetes.io/projected/ad146be8-a637-4ae4-b353-4cb9e36b4d74-kube-api-access-pjjp7\") pod \"keystone-db-create-m57px\" (UID: \"ad146be8-a637-4ae4-b353-4cb9e36b4d74\") " pod="openstack/keystone-db-create-m57px" Sep 29 23:54:17 crc kubenswrapper[4922]: I0929 23:54:17.977527 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-m57px" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.112784 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lb4wd"] Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.125470 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.138930 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lb4wd"] Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.191761 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-utilities\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.192357 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt8tt\" (UniqueName: \"kubernetes.io/projected/abb1fd9f-44db-4ca1-b01a-efad6575bdda-kube-api-access-nt8tt\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.192512 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-catalog-content\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.295054 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-utilities\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.295140 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt8tt\" (UniqueName: \"kubernetes.io/projected/abb1fd9f-44db-4ca1-b01a-efad6575bdda-kube-api-access-nt8tt\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.295208 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-catalog-content\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.295629 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-utilities\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.295629 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-catalog-content\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.314831 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nt8tt\" (UniqueName: \"kubernetes.io/projected/abb1fd9f-44db-4ca1-b01a-efad6575bdda-kube-api-access-nt8tt\") pod \"redhat-operators-lb4wd\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.461025 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-m57px"] Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.467971 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:18 crc kubenswrapper[4922]: I0929 23:54:18.918709 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lb4wd"] Sep 29 23:54:18 crc kubenswrapper[4922]: W0929 23:54:18.921344 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabb1fd9f_44db_4ca1_b01a_efad6575bdda.slice/crio-b84ca1a9d282c9bef90ed84c4fab2aea605ca797407261f950ce38c70b01eb14 WatchSource:0}: Error finding container b84ca1a9d282c9bef90ed84c4fab2aea605ca797407261f950ce38c70b01eb14: Status 404 returned error can't find the container with id b84ca1a9d282c9bef90ed84c4fab2aea605ca797407261f950ce38c70b01eb14 Sep 29 23:54:19 crc kubenswrapper[4922]: I0929 23:54:19.274544 4922 generic.go:334] "Generic (PLEG): container finished" podID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerID="cb918a646b419ea1b1cadcc9b68ed356fd702f9098d34de8017345a40aa44036" exitCode=0 Sep 29 23:54:19 crc kubenswrapper[4922]: I0929 23:54:19.274693 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lb4wd" event={"ID":"abb1fd9f-44db-4ca1-b01a-efad6575bdda","Type":"ContainerDied","Data":"cb918a646b419ea1b1cadcc9b68ed356fd702f9098d34de8017345a40aa44036"} Sep 29 23:54:19 crc kubenswrapper[4922]: I0929 23:54:19.274845 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lb4wd" event={"ID":"abb1fd9f-44db-4ca1-b01a-efad6575bdda","Type":"ContainerStarted","Data":"b84ca1a9d282c9bef90ed84c4fab2aea605ca797407261f950ce38c70b01eb14"} Sep 29 23:54:19 crc kubenswrapper[4922]: I0929 23:54:19.276723 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 23:54:19 crc kubenswrapper[4922]: I0929 23:54:19.279427 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-m57px" event={"ID":"ad146be8-a637-4ae4-b353-4cb9e36b4d74","Type":"ContainerDied","Data":"0e4f104bc48fe041d1a0cbe1d669cfa1eeb8f0d3f288bd5f805117e16c872d5e"} Sep 29 23:54:19 crc kubenswrapper[4922]: I0929 23:54:19.279815 4922 generic.go:334] "Generic (PLEG): container finished" podID="ad146be8-a637-4ae4-b353-4cb9e36b4d74" containerID="0e4f104bc48fe041d1a0cbe1d669cfa1eeb8f0d3f288bd5f805117e16c872d5e" exitCode=0 Sep 29 23:54:19 crc kubenswrapper[4922]: I0929 23:54:19.279892 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-m57px" event={"ID":"ad146be8-a637-4ae4-b353-4cb9e36b4d74","Type":"ContainerStarted","Data":"4b7d62184947f2621320b99b9d878a2d74cdd8752410775b8be34e882094f1d3"} Sep 29 23:54:20 crc kubenswrapper[4922]: I0929 23:54:20.680238 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-m57px" Sep 29 23:54:20 crc kubenswrapper[4922]: I0929 23:54:20.841021 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjjp7\" (UniqueName: \"kubernetes.io/projected/ad146be8-a637-4ae4-b353-4cb9e36b4d74-kube-api-access-pjjp7\") pod \"ad146be8-a637-4ae4-b353-4cb9e36b4d74\" (UID: \"ad146be8-a637-4ae4-b353-4cb9e36b4d74\") " Sep 29 23:54:20 crc kubenswrapper[4922]: I0929 23:54:20.850634 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad146be8-a637-4ae4-b353-4cb9e36b4d74-kube-api-access-pjjp7" (OuterVolumeSpecName: "kube-api-access-pjjp7") pod "ad146be8-a637-4ae4-b353-4cb9e36b4d74" (UID: "ad146be8-a637-4ae4-b353-4cb9e36b4d74"). InnerVolumeSpecName "kube-api-access-pjjp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:20 crc kubenswrapper[4922]: I0929 23:54:20.943620 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjjp7\" (UniqueName: \"kubernetes.io/projected/ad146be8-a637-4ae4-b353-4cb9e36b4d74-kube-api-access-pjjp7\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:21 crc kubenswrapper[4922]: I0929 23:54:21.304032 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-m57px" event={"ID":"ad146be8-a637-4ae4-b353-4cb9e36b4d74","Type":"ContainerDied","Data":"4b7d62184947f2621320b99b9d878a2d74cdd8752410775b8be34e882094f1d3"} Sep 29 23:54:21 crc kubenswrapper[4922]: I0929 23:54:21.304718 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b7d62184947f2621320b99b9d878a2d74cdd8752410775b8be34e882094f1d3" Sep 29 23:54:21 crc kubenswrapper[4922]: I0929 23:54:21.304071 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-m57px" Sep 29 23:54:21 crc kubenswrapper[4922]: I0929 23:54:21.307677 4922 generic.go:334] "Generic (PLEG): container finished" podID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerID="4c086413e2ef6eb2bacd8b2db2413b46a9a6cd54822d2220d6e8622492bc0f33" exitCode=0 Sep 29 23:54:21 crc kubenswrapper[4922]: I0929 23:54:21.307744 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lb4wd" event={"ID":"abb1fd9f-44db-4ca1-b01a-efad6575bdda","Type":"ContainerDied","Data":"4c086413e2ef6eb2bacd8b2db2413b46a9a6cd54822d2220d6e8622492bc0f33"} Sep 29 23:54:22 crc kubenswrapper[4922]: I0929 23:54:22.247643 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 29 23:54:22 crc kubenswrapper[4922]: I0929 23:54:22.328134 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lb4wd" event={"ID":"abb1fd9f-44db-4ca1-b01a-efad6575bdda","Type":"ContainerStarted","Data":"a749b8c73b62343559d824e12d0142a56ec6b7252ba09e597ab2934b9550b93b"} Sep 29 23:54:22 crc kubenswrapper[4922]: I0929 23:54:22.348537 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lb4wd" podStartSLOduration=1.858300185 podStartE2EDuration="4.348512956s" podCreationTimestamp="2025-09-29 23:54:18 +0000 UTC" firstStartedPulling="2025-09-29 23:54:19.276318591 +0000 UTC m=+5263.586607404" lastFinishedPulling="2025-09-29 23:54:21.766531332 +0000 UTC m=+5266.076820175" observedRunningTime="2025-09-29 23:54:22.343716808 +0000 UTC m=+5266.654005641" watchObservedRunningTime="2025-09-29 23:54:22.348512956 +0000 UTC m=+5266.658801789" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.680561 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-f160-account-create-k5ngt"] Sep 29 23:54:27 crc kubenswrapper[4922]: E0929 23:54:27.681121 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad146be8-a637-4ae4-b353-4cb9e36b4d74" containerName="mariadb-database-create" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.681132 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad146be8-a637-4ae4-b353-4cb9e36b4d74" containerName="mariadb-database-create" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.681268 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad146be8-a637-4ae4-b353-4cb9e36b4d74" containerName="mariadb-database-create" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.689109 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f160-account-create-k5ngt" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.691855 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f160-account-create-k5ngt"] Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.692013 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.779021 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8dnd\" (UniqueName: \"kubernetes.io/projected/b79e6c5a-7a81-448e-babc-d7d764ad0650-kube-api-access-b8dnd\") pod \"keystone-f160-account-create-k5ngt\" (UID: \"b79e6c5a-7a81-448e-babc-d7d764ad0650\") " pod="openstack/keystone-f160-account-create-k5ngt" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.880504 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8dnd\" (UniqueName: \"kubernetes.io/projected/b79e6c5a-7a81-448e-babc-d7d764ad0650-kube-api-access-b8dnd\") pod \"keystone-f160-account-create-k5ngt\" (UID: \"b79e6c5a-7a81-448e-babc-d7d764ad0650\") " pod="openstack/keystone-f160-account-create-k5ngt" Sep 29 23:54:27 crc kubenswrapper[4922]: I0929 23:54:27.901482 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8dnd\" (UniqueName: \"kubernetes.io/projected/b79e6c5a-7a81-448e-babc-d7d764ad0650-kube-api-access-b8dnd\") pod \"keystone-f160-account-create-k5ngt\" (UID: \"b79e6c5a-7a81-448e-babc-d7d764ad0650\") " pod="openstack/keystone-f160-account-create-k5ngt" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.013293 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f160-account-create-k5ngt" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.469265 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.469663 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.518046 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f160-account-create-k5ngt"] Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.549460 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.912454 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.912542 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.912602 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.913778 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 23:54:28 crc kubenswrapper[4922]: I0929 23:54:28.913884 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" gracePeriod=600 Sep 29 23:54:29 crc kubenswrapper[4922]: E0929 23:54:29.050432 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.413844 4922 generic.go:334] "Generic (PLEG): container finished" podID="b79e6c5a-7a81-448e-babc-d7d764ad0650" containerID="8508fd4ead24194ad605678d39e01c7200e6598497999c3ac33d8a39531b199f" exitCode=0 Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.413967 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f160-account-create-k5ngt" 
event={"ID":"b79e6c5a-7a81-448e-babc-d7d764ad0650","Type":"ContainerDied","Data":"8508fd4ead24194ad605678d39e01c7200e6598497999c3ac33d8a39531b199f"} Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.414001 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f160-account-create-k5ngt" event={"ID":"b79e6c5a-7a81-448e-babc-d7d764ad0650","Type":"ContainerStarted","Data":"1ff0cd14ce8574d6b0c9d86cdc4cdeecabd91a9ff20092fccc31e30b4d2c93b1"} Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.416771 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" exitCode=0 Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.417636 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7"} Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.417769 4922 scope.go:117] "RemoveContainer" containerID="ff9f576c4f920051ee029d64618cf6e7e349c0b190d881ecf9094b87fe73dac7" Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.418379 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:54:29 crc kubenswrapper[4922]: E0929 23:54:29.418755 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.499917 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:29 crc kubenswrapper[4922]: I0929 23:54:29.572505 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lb4wd"] Sep 29 23:54:30 crc kubenswrapper[4922]: I0929 23:54:30.810388 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f160-account-create-k5ngt" Sep 29 23:54:30 crc kubenswrapper[4922]: I0929 23:54:30.939358 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8dnd\" (UniqueName: \"kubernetes.io/projected/b79e6c5a-7a81-448e-babc-d7d764ad0650-kube-api-access-b8dnd\") pod \"b79e6c5a-7a81-448e-babc-d7d764ad0650\" (UID: \"b79e6c5a-7a81-448e-babc-d7d764ad0650\") " Sep 29 23:54:30 crc kubenswrapper[4922]: I0929 23:54:30.948247 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b79e6c5a-7a81-448e-babc-d7d764ad0650-kube-api-access-b8dnd" (OuterVolumeSpecName: "kube-api-access-b8dnd") pod "b79e6c5a-7a81-448e-babc-d7d764ad0650" (UID: "b79e6c5a-7a81-448e-babc-d7d764ad0650"). InnerVolumeSpecName "kube-api-access-b8dnd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:31 crc kubenswrapper[4922]: I0929 23:54:31.041096 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8dnd\" (UniqueName: \"kubernetes.io/projected/b79e6c5a-7a81-448e-babc-d7d764ad0650-kube-api-access-b8dnd\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:31 crc kubenswrapper[4922]: I0929 23:54:31.442186 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f160-account-create-k5ngt" Sep 29 23:54:31 crc kubenswrapper[4922]: I0929 23:54:31.442200 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f160-account-create-k5ngt" event={"ID":"b79e6c5a-7a81-448e-babc-d7d764ad0650","Type":"ContainerDied","Data":"1ff0cd14ce8574d6b0c9d86cdc4cdeecabd91a9ff20092fccc31e30b4d2c93b1"} Sep 29 23:54:31 crc kubenswrapper[4922]: I0929 23:54:31.442247 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ff0cd14ce8574d6b0c9d86cdc4cdeecabd91a9ff20092fccc31e30b4d2c93b1" Sep 29 23:54:31 crc kubenswrapper[4922]: I0929 23:54:31.442292 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lb4wd" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="registry-server" containerID="cri-o://a749b8c73b62343559d824e12d0142a56ec6b7252ba09e597ab2934b9550b93b" gracePeriod=2 Sep 29 23:54:32 crc kubenswrapper[4922]: I0929 23:54:32.456307 4922 generic.go:334] "Generic (PLEG): container finished" podID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerID="a749b8c73b62343559d824e12d0142a56ec6b7252ba09e597ab2934b9550b93b" exitCode=0 Sep 29 23:54:32 crc kubenswrapper[4922]: I0929 23:54:32.456384 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lb4wd" event={"ID":"abb1fd9f-44db-4ca1-b01a-efad6575bdda","Type":"ContainerDied","Data":"a749b8c73b62343559d824e12d0142a56ec6b7252ba09e597ab2934b9550b93b"} Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.096054 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.132859 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-v4vgm"] Sep 29 23:54:33 crc kubenswrapper[4922]: E0929 23:54:33.133370 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="registry-server" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.133466 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="registry-server" Sep 29 23:54:33 crc kubenswrapper[4922]: E0929 23:54:33.133528 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="extract-content" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.133686 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="extract-content" Sep 29 23:54:33 crc kubenswrapper[4922]: E0929 23:54:33.133812 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b79e6c5a-7a81-448e-babc-d7d764ad0650" containerName="mariadb-account-create" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.133920 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b79e6c5a-7a81-448e-babc-d7d764ad0650" containerName="mariadb-account-create" Sep 29 23:54:33 crc kubenswrapper[4922]: E0929 23:54:33.134002 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="extract-utilities" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.134119 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="extract-utilities" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.134356 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" containerName="registry-server" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.134448 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b79e6c5a-7a81-448e-babc-d7d764ad0650" containerName="mariadb-account-create" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.135039 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.137733 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.137822 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.138989 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.139153 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-xhhkq" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.159591 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v4vgm"] Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.177305 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt8tt\" (UniqueName: \"kubernetes.io/projected/abb1fd9f-44db-4ca1-b01a-efad6575bdda-kube-api-access-nt8tt\") pod \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.177475 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-utilities\") pod \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.177508 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-catalog-content\") pod \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\" (UID: \"abb1fd9f-44db-4ca1-b01a-efad6575bdda\") " Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.178530 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-utilities" (OuterVolumeSpecName: "utilities") pod "abb1fd9f-44db-4ca1-b01a-efad6575bdda" (UID: "abb1fd9f-44db-4ca1-b01a-efad6575bdda"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.183273 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abb1fd9f-44db-4ca1-b01a-efad6575bdda-kube-api-access-nt8tt" (OuterVolumeSpecName: "kube-api-access-nt8tt") pod "abb1fd9f-44db-4ca1-b01a-efad6575bdda" (UID: "abb1fd9f-44db-4ca1-b01a-efad6575bdda"). InnerVolumeSpecName "kube-api-access-nt8tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.273627 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abb1fd9f-44db-4ca1-b01a-efad6575bdda" (UID: "abb1fd9f-44db-4ca1-b01a-efad6575bdda"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.278700 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-config-data\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.278898 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw4pc\" (UniqueName: \"kubernetes.io/projected/8b85e9b7-babd-4976-899c-58b5cf1b4551-kube-api-access-vw4pc\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.278994 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-combined-ca-bundle\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.279159 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt8tt\" (UniqueName: \"kubernetes.io/projected/abb1fd9f-44db-4ca1-b01a-efad6575bdda-kube-api-access-nt8tt\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.279226 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.279289 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb1fd9f-44db-4ca1-b01a-efad6575bdda-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.380614 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-config-data\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.380687 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw4pc\" (UniqueName: \"kubernetes.io/projected/8b85e9b7-babd-4976-899c-58b5cf1b4551-kube-api-access-vw4pc\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.380736 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-combined-ca-bundle\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.386268 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-combined-ca-bundle\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " 
pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.387857 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-config-data\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.410870 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw4pc\" (UniqueName: \"kubernetes.io/projected/8b85e9b7-babd-4976-899c-58b5cf1b4551-kube-api-access-vw4pc\") pod \"keystone-db-sync-v4vgm\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.454966 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.466855 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lb4wd" event={"ID":"abb1fd9f-44db-4ca1-b01a-efad6575bdda","Type":"ContainerDied","Data":"b84ca1a9d282c9bef90ed84c4fab2aea605ca797407261f950ce38c70b01eb14"} Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.467868 4922 scope.go:117] "RemoveContainer" containerID="a749b8c73b62343559d824e12d0142a56ec6b7252ba09e597ab2934b9550b93b" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.468306 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lb4wd" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.507520 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lb4wd"] Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.514050 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lb4wd"] Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.521770 4922 scope.go:117] "RemoveContainer" containerID="4c086413e2ef6eb2bacd8b2db2413b46a9a6cd54822d2220d6e8622492bc0f33" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.551702 4922 scope.go:117] "RemoveContainer" containerID="cb918a646b419ea1b1cadcc9b68ed356fd702f9098d34de8017345a40aa44036" Sep 29 23:54:33 crc kubenswrapper[4922]: I0929 23:54:33.993887 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v4vgm"] Sep 29 23:54:34 crc kubenswrapper[4922]: I0929 23:54:34.437997 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abb1fd9f-44db-4ca1-b01a-efad6575bdda" path="/var/lib/kubelet/pods/abb1fd9f-44db-4ca1-b01a-efad6575bdda/volumes" Sep 29 23:54:34 crc kubenswrapper[4922]: I0929 23:54:34.488214 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vgm" event={"ID":"8b85e9b7-babd-4976-899c-58b5cf1b4551","Type":"ContainerStarted","Data":"07de248981f6e77a2883923993ffab71ac32dde645b81bbf70edb0ad1eda6425"} Sep 29 23:54:34 crc kubenswrapper[4922]: I0929 23:54:34.488271 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vgm" event={"ID":"8b85e9b7-babd-4976-899c-58b5cf1b4551","Type":"ContainerStarted","Data":"90fb2c3044b02a24281f4443cdb67320cea4e849192f1a6656a18d3b89840e57"} Sep 29 23:54:34 crc kubenswrapper[4922]: I0929 23:54:34.514522 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-v4vgm" 
podStartSLOduration=1.514493495 podStartE2EDuration="1.514493495s" podCreationTimestamp="2025-09-29 23:54:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:34.505857452 +0000 UTC m=+5278.816146295" watchObservedRunningTime="2025-09-29 23:54:34.514493495 +0000 UTC m=+5278.824782338" Sep 29 23:54:36 crc kubenswrapper[4922]: I0929 23:54:36.516330 4922 generic.go:334] "Generic (PLEG): container finished" podID="8b85e9b7-babd-4976-899c-58b5cf1b4551" containerID="07de248981f6e77a2883923993ffab71ac32dde645b81bbf70edb0ad1eda6425" exitCode=0 Sep 29 23:54:36 crc kubenswrapper[4922]: I0929 23:54:36.516446 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vgm" event={"ID":"8b85e9b7-babd-4976-899c-58b5cf1b4551","Type":"ContainerDied","Data":"07de248981f6e77a2883923993ffab71ac32dde645b81bbf70edb0ad1eda6425"} Sep 29 23:54:37 crc kubenswrapper[4922]: I0929 23:54:37.994756 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.074956 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-combined-ca-bundle\") pod \"8b85e9b7-babd-4976-899c-58b5cf1b4551\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.075468 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-config-data\") pod \"8b85e9b7-babd-4976-899c-58b5cf1b4551\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.075508 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw4pc\" (UniqueName: \"kubernetes.io/projected/8b85e9b7-babd-4976-899c-58b5cf1b4551-kube-api-access-vw4pc\") pod \"8b85e9b7-babd-4976-899c-58b5cf1b4551\" (UID: \"8b85e9b7-babd-4976-899c-58b5cf1b4551\") " Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.083506 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b85e9b7-babd-4976-899c-58b5cf1b4551-kube-api-access-vw4pc" (OuterVolumeSpecName: "kube-api-access-vw4pc") pod "8b85e9b7-babd-4976-899c-58b5cf1b4551" (UID: "8b85e9b7-babd-4976-899c-58b5cf1b4551"). InnerVolumeSpecName "kube-api-access-vw4pc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.117724 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b85e9b7-babd-4976-899c-58b5cf1b4551" (UID: "8b85e9b7-babd-4976-899c-58b5cf1b4551"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.133670 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-config-data" (OuterVolumeSpecName: "config-data") pod "8b85e9b7-babd-4976-899c-58b5cf1b4551" (UID: "8b85e9b7-babd-4976-899c-58b5cf1b4551"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.177934 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.177981 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw4pc\" (UniqueName: \"kubernetes.io/projected/8b85e9b7-babd-4976-899c-58b5cf1b4551-kube-api-access-vw4pc\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.178001 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b85e9b7-babd-4976-899c-58b5cf1b4551-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.537772 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v4vgm" event={"ID":"8b85e9b7-babd-4976-899c-58b5cf1b4551","Type":"ContainerDied","Data":"90fb2c3044b02a24281f4443cdb67320cea4e849192f1a6656a18d3b89840e57"} Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.537819 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v4vgm" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.537842 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90fb2c3044b02a24281f4443cdb67320cea4e849192f1a6656a18d3b89840e57" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.800565 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5cbf4f8d45-r8lm9"] Sep 29 23:54:38 crc kubenswrapper[4922]: E0929 23:54:38.800957 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b85e9b7-babd-4976-899c-58b5cf1b4551" containerName="keystone-db-sync" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.800979 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b85e9b7-babd-4976-899c-58b5cf1b4551" containerName="keystone-db-sync" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.801207 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b85e9b7-babd-4976-899c-58b5cf1b4551" containerName="keystone-db-sync" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.802243 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.824969 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-wkdsj"] Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.826334 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.828637 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.828954 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.828982 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.829046 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-xhhkq" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.843520 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cbf4f8d45-r8lm9"] Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.854861 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-wkdsj"] Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893743 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-credential-keys\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893801 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnknl\" (UniqueName: \"kubernetes.io/projected/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-kube-api-access-dnknl\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893827 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-sb\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893881 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-dns-svc\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893901 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-config\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893918 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-fernet-keys\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893950 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdj29\" (UniqueName: \"kubernetes.io/projected/d649e447-9a28-4858-8390-f5f05948d127-kube-api-access-kdj29\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.893966 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-config-data\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.894163 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-nb\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.894212 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-scripts\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.894235 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-combined-ca-bundle\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.995970 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-dns-svc\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996022 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-config\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996043 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-fernet-keys\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996073 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdj29\" (UniqueName: \"kubernetes.io/projected/d649e447-9a28-4858-8390-f5f05948d127-kube-api-access-kdj29\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996090 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-config-data\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996130 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-nb\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996153 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-scripts\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996173 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-combined-ca-bundle\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996204 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-credential-keys\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996228 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnknl\" (UniqueName: \"kubernetes.io/projected/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-kube-api-access-dnknl\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.996253 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-sb\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.997201 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-dns-svc\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:38 crc kubenswrapper[4922]: I0929 23:54:38.998077 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-sb\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.000695 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-credential-keys\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.000754 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-fernet-keys\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.001530 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-nb\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.001878 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-config\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.002307 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-config-data\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.005141 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-scripts\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.005723 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-combined-ca-bundle\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.032337 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdj29\" (UniqueName: \"kubernetes.io/projected/d649e447-9a28-4858-8390-f5f05948d127-kube-api-access-kdj29\") pod \"dnsmasq-dns-5cbf4f8d45-r8lm9\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.045613 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnknl\" (UniqueName: \"kubernetes.io/projected/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-kube-api-access-dnknl\") pod \"keystone-bootstrap-wkdsj\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.118146 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.139824 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.639148 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5cbf4f8d45-r8lm9"] Sep 29 23:54:39 crc kubenswrapper[4922]: W0929 23:54:39.645876 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd649e447_9a28_4858_8390_f5f05948d127.slice/crio-e45886f72fc9b015b97e00a57e1383b5210e5455e8965528d82a235203b9533e WatchSource:0}: Error finding container e45886f72fc9b015b97e00a57e1383b5210e5455e8965528d82a235203b9533e: Status 404 returned error can't find the container with id e45886f72fc9b015b97e00a57e1383b5210e5455e8965528d82a235203b9533e Sep 29 23:54:39 crc kubenswrapper[4922]: I0929 23:54:39.728416 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-wkdsj"] Sep 29 23:54:39 crc kubenswrapper[4922]: W0929 23:54:39.751705 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf679ed8d_26ea_4469_a862_a4f1dd5d07ef.slice/crio-3c29c729c82f2a21c13911fcbf016db8f1ee033fde47e23b497f56efa4d5c5e4 WatchSource:0}: Error finding container 3c29c729c82f2a21c13911fcbf016db8f1ee033fde47e23b497f56efa4d5c5e4: Status 404 returned error can't find the container with id 3c29c729c82f2a21c13911fcbf016db8f1ee033fde47e23b497f56efa4d5c5e4 Sep 29 23:54:39 crc kubenswrapper[4922]: E0929 23:54:39.964498 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd649e447_9a28_4858_8390_f5f05948d127.slice/crio-conmon-c5847ddcfb1f9a7fefbaeccbc1de5e859a3ef6057479270e768f943e2007aa82.scope\": RecentStats: unable to find data in memory cache]" Sep 29 23:54:40 crc kubenswrapper[4922]: I0929 23:54:40.434244 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:54:40 crc kubenswrapper[4922]: E0929 23:54:40.435161 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:54:40 crc kubenswrapper[4922]: I0929 23:54:40.556984 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wkdsj" event={"ID":"f679ed8d-26ea-4469-a862-a4f1dd5d07ef","Type":"ContainerStarted","Data":"ca2850b54ce7078105fa7ad90b152503fc253a16cc4825958fb7455221ac82bb"} Sep 29 23:54:40 crc kubenswrapper[4922]: I0929 23:54:40.557176 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wkdsj" event={"ID":"f679ed8d-26ea-4469-a862-a4f1dd5d07ef","Type":"ContainerStarted","Data":"3c29c729c82f2a21c13911fcbf016db8f1ee033fde47e23b497f56efa4d5c5e4"} Sep 29 23:54:40 crc kubenswrapper[4922]: I0929 23:54:40.558780 4922 generic.go:334] "Generic (PLEG): container finished" podID="d649e447-9a28-4858-8390-f5f05948d127" containerID="c5847ddcfb1f9a7fefbaeccbc1de5e859a3ef6057479270e768f943e2007aa82" exitCode=0 Sep 29 23:54:40 crc kubenswrapper[4922]: I0929 23:54:40.558879 4922 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" event={"ID":"d649e447-9a28-4858-8390-f5f05948d127","Type":"ContainerDied","Data":"c5847ddcfb1f9a7fefbaeccbc1de5e859a3ef6057479270e768f943e2007aa82"} Sep 29 23:54:40 crc kubenswrapper[4922]: I0929 23:54:40.559096 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" event={"ID":"d649e447-9a28-4858-8390-f5f05948d127","Type":"ContainerStarted","Data":"e45886f72fc9b015b97e00a57e1383b5210e5455e8965528d82a235203b9533e"} Sep 29 23:54:40 crc kubenswrapper[4922]: I0929 23:54:40.584805 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-wkdsj" podStartSLOduration=2.584786643 podStartE2EDuration="2.584786643s" podCreationTimestamp="2025-09-29 23:54:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:40.581109892 +0000 UTC m=+5284.891398705" watchObservedRunningTime="2025-09-29 23:54:40.584786643 +0000 UTC m=+5284.895075466" Sep 29 23:54:41 crc kubenswrapper[4922]: I0929 23:54:41.567989 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" event={"ID":"d649e447-9a28-4858-8390-f5f05948d127","Type":"ContainerStarted","Data":"e7f868a6a0cf9c0e09ac0ffdbf6acb055837bb6e517536393e38fc4f87555015"} Sep 29 23:54:41 crc kubenswrapper[4922]: I0929 23:54:41.590928 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" podStartSLOduration=3.590906155 podStartE2EDuration="3.590906155s" podCreationTimestamp="2025-09-29 23:54:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:41.584746484 +0000 UTC m=+5285.895035307" watchObservedRunningTime="2025-09-29 23:54:41.590906155 +0000 UTC m=+5285.901194968" Sep 29 23:54:42 crc kubenswrapper[4922]: I0929 23:54:42.594646 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:43 crc kubenswrapper[4922]: I0929 23:54:43.607371 4922 generic.go:334] "Generic (PLEG): container finished" podID="f679ed8d-26ea-4469-a862-a4f1dd5d07ef" containerID="ca2850b54ce7078105fa7ad90b152503fc253a16cc4825958fb7455221ac82bb" exitCode=0 Sep 29 23:54:43 crc kubenswrapper[4922]: I0929 23:54:43.607485 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wkdsj" event={"ID":"f679ed8d-26ea-4469-a862-a4f1dd5d07ef","Type":"ContainerDied","Data":"ca2850b54ce7078105fa7ad90b152503fc253a16cc4825958fb7455221ac82bb"} Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.003782 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.129759 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnknl\" (UniqueName: \"kubernetes.io/projected/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-kube-api-access-dnknl\") pod \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.129856 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-combined-ca-bundle\") pod \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.129884 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-scripts\") pod \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.129913 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-config-data\") pod \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.129980 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-credential-keys\") pod \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.130039 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-fernet-keys\") pod \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\" (UID: \"f679ed8d-26ea-4469-a862-a4f1dd5d07ef\") " Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.137027 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f679ed8d-26ea-4469-a862-a4f1dd5d07ef" (UID: "f679ed8d-26ea-4469-a862-a4f1dd5d07ef"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.137336 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-scripts" (OuterVolumeSpecName: "scripts") pod "f679ed8d-26ea-4469-a862-a4f1dd5d07ef" (UID: "f679ed8d-26ea-4469-a862-a4f1dd5d07ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.138359 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-kube-api-access-dnknl" (OuterVolumeSpecName: "kube-api-access-dnknl") pod "f679ed8d-26ea-4469-a862-a4f1dd5d07ef" (UID: "f679ed8d-26ea-4469-a862-a4f1dd5d07ef"). InnerVolumeSpecName "kube-api-access-dnknl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.141571 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "f679ed8d-26ea-4469-a862-a4f1dd5d07ef" (UID: "f679ed8d-26ea-4469-a862-a4f1dd5d07ef"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.154850 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-config-data" (OuterVolumeSpecName: "config-data") pod "f679ed8d-26ea-4469-a862-a4f1dd5d07ef" (UID: "f679ed8d-26ea-4469-a862-a4f1dd5d07ef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.173382 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f679ed8d-26ea-4469-a862-a4f1dd5d07ef" (UID: "f679ed8d-26ea-4469-a862-a4f1dd5d07ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.231766 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.231805 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.231818 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.231829 4922 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.231841 4922 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.231851 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnknl\" (UniqueName: \"kubernetes.io/projected/f679ed8d-26ea-4469-a862-a4f1dd5d07ef-kube-api-access-dnknl\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.629875 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wkdsj" event={"ID":"f679ed8d-26ea-4469-a862-a4f1dd5d07ef","Type":"ContainerDied","Data":"3c29c729c82f2a21c13911fcbf016db8f1ee033fde47e23b497f56efa4d5c5e4"} Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.629931 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c29c729c82f2a21c13911fcbf016db8f1ee033fde47e23b497f56efa4d5c5e4" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.630002 4922 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wkdsj" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.730686 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-wkdsj"] Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.739645 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-wkdsj"] Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.820182 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-plwj5"] Sep 29 23:54:45 crc kubenswrapper[4922]: E0929 23:54:45.820612 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f679ed8d-26ea-4469-a862-a4f1dd5d07ef" containerName="keystone-bootstrap" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.820631 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f679ed8d-26ea-4469-a862-a4f1dd5d07ef" containerName="keystone-bootstrap" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.820845 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f679ed8d-26ea-4469-a862-a4f1dd5d07ef" containerName="keystone-bootstrap" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.821623 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.825933 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.826218 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.829939 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-xhhkq" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.830616 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.861807 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-plwj5"] Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.965652 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-fernet-keys\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.965770 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-credential-keys\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.965817 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-combined-ca-bundle\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.965892 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-2vkkn\" (UniqueName: \"kubernetes.io/projected/c5c74b25-a885-423b-9383-711519104495-kube-api-access-2vkkn\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.965978 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-scripts\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:45 crc kubenswrapper[4922]: I0929 23:54:45.966079 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-config-data\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.067295 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-fernet-keys\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.067387 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-credential-keys\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.067459 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-combined-ca-bundle\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.067535 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vkkn\" (UniqueName: \"kubernetes.io/projected/c5c74b25-a885-423b-9383-711519104495-kube-api-access-2vkkn\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.067664 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-scripts\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.067795 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-config-data\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.074949 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-scripts\") pod 
\"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.075172 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-fernet-keys\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.077215 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-combined-ca-bundle\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.077827 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-config-data\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.082021 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-credential-keys\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.099002 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vkkn\" (UniqueName: \"kubernetes.io/projected/c5c74b25-a885-423b-9383-711519104495-kube-api-access-2vkkn\") pod \"keystone-bootstrap-plwj5\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.150424 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.438254 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f679ed8d-26ea-4469-a862-a4f1dd5d07ef" path="/var/lib/kubelet/pods/f679ed8d-26ea-4469-a862-a4f1dd5d07ef/volumes" Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.622332 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-plwj5"] Sep 29 23:54:46 crc kubenswrapper[4922]: I0929 23:54:46.643978 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-plwj5" event={"ID":"c5c74b25-a885-423b-9383-711519104495","Type":"ContainerStarted","Data":"2f092bc07c40d5a9be2370dc46b5ea52d5ea24193ebd83707c3a3303499ada19"} Sep 29 23:54:47 crc kubenswrapper[4922]: I0929 23:54:47.656250 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-plwj5" event={"ID":"c5c74b25-a885-423b-9383-711519104495","Type":"ContainerStarted","Data":"2fb38785f8873dac286f64519819801e58b3aed0e65f7442c98521139007a87d"} Sep 29 23:54:47 crc kubenswrapper[4922]: I0929 23:54:47.696026 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-plwj5" podStartSLOduration=2.695993149 podStartE2EDuration="2.695993149s" podCreationTimestamp="2025-09-29 23:54:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:47.688918525 +0000 UTC m=+5291.999207378" watchObservedRunningTime="2025-09-29 23:54:47.695993149 +0000 UTC m=+5292.006281992" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.120720 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.231715 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cf678f797-zn5st"] Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.231969 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" podUID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerName="dnsmasq-dns" containerID="cri-o://ee9b371d6d9a4d3c4432025412c08ef62f58ca0857c33e6f25052f1752ce8b77" gracePeriod=10 Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.678951 4922 generic.go:334] "Generic (PLEG): container finished" podID="c5c74b25-a885-423b-9383-711519104495" containerID="2fb38785f8873dac286f64519819801e58b3aed0e65f7442c98521139007a87d" exitCode=0 Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.679031 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-plwj5" event={"ID":"c5c74b25-a885-423b-9383-711519104495","Type":"ContainerDied","Data":"2fb38785f8873dac286f64519819801e58b3aed0e65f7442c98521139007a87d"} Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.681691 4922 generic.go:334] "Generic (PLEG): container finished" podID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerID="ee9b371d6d9a4d3c4432025412c08ef62f58ca0857c33e6f25052f1752ce8b77" exitCode=0 Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.681733 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" event={"ID":"43634687-ed85-4d66-807e-77c40c3d4e6a","Type":"ContainerDied","Data":"ee9b371d6d9a4d3c4432025412c08ef62f58ca0857c33e6f25052f1752ce8b77"} Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 
23:54:49.681782 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" event={"ID":"43634687-ed85-4d66-807e-77c40c3d4e6a","Type":"ContainerDied","Data":"227e54c1409c9c3a1d68d48d4a439121632e5d8e9776115644122aa6bc240917"} Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.681801 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="227e54c1409c9c3a1d68d48d4a439121632e5d8e9776115644122aa6bc240917" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.695686 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.852537 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-config\") pod \"43634687-ed85-4d66-807e-77c40c3d4e6a\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.852633 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-dns-svc\") pod \"43634687-ed85-4d66-807e-77c40c3d4e6a\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.852680 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-sb\") pod \"43634687-ed85-4d66-807e-77c40c3d4e6a\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.852749 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slbs6\" (UniqueName: \"kubernetes.io/projected/43634687-ed85-4d66-807e-77c40c3d4e6a-kube-api-access-slbs6\") pod \"43634687-ed85-4d66-807e-77c40c3d4e6a\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.852777 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-nb\") pod \"43634687-ed85-4d66-807e-77c40c3d4e6a\" (UID: \"43634687-ed85-4d66-807e-77c40c3d4e6a\") " Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.859476 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43634687-ed85-4d66-807e-77c40c3d4e6a-kube-api-access-slbs6" (OuterVolumeSpecName: "kube-api-access-slbs6") pod "43634687-ed85-4d66-807e-77c40c3d4e6a" (UID: "43634687-ed85-4d66-807e-77c40c3d4e6a"). InnerVolumeSpecName "kube-api-access-slbs6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.898735 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "43634687-ed85-4d66-807e-77c40c3d4e6a" (UID: "43634687-ed85-4d66-807e-77c40c3d4e6a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.908264 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "43634687-ed85-4d66-807e-77c40c3d4e6a" (UID: "43634687-ed85-4d66-807e-77c40c3d4e6a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.913918 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-config" (OuterVolumeSpecName: "config") pod "43634687-ed85-4d66-807e-77c40c3d4e6a" (UID: "43634687-ed85-4d66-807e-77c40c3d4e6a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.917641 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "43634687-ed85-4d66-807e-77c40c3d4e6a" (UID: "43634687-ed85-4d66-807e-77c40c3d4e6a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.954824 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.955458 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.955494 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.955529 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slbs6\" (UniqueName: \"kubernetes.io/projected/43634687-ed85-4d66-807e-77c40c3d4e6a-kube-api-access-slbs6\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:49 crc kubenswrapper[4922]: I0929 23:54:49.955554 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/43634687-ed85-4d66-807e-77c40c3d4e6a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:50 crc kubenswrapper[4922]: I0929 23:54:50.692508 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cf678f797-zn5st" Sep 29 23:54:50 crc kubenswrapper[4922]: I0929 23:54:50.753456 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cf678f797-zn5st"] Sep 29 23:54:50 crc kubenswrapper[4922]: I0929 23:54:50.764509 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cf678f797-zn5st"] Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.108564 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.183589 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vkkn\" (UniqueName: \"kubernetes.io/projected/c5c74b25-a885-423b-9383-711519104495-kube-api-access-2vkkn\") pod \"c5c74b25-a885-423b-9383-711519104495\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.183696 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-credential-keys\") pod \"c5c74b25-a885-423b-9383-711519104495\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.183731 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-fernet-keys\") pod \"c5c74b25-a885-423b-9383-711519104495\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.183957 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-scripts\") pod \"c5c74b25-a885-423b-9383-711519104495\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.184020 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-combined-ca-bundle\") pod \"c5c74b25-a885-423b-9383-711519104495\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.184093 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-config-data\") pod \"c5c74b25-a885-423b-9383-711519104495\" (UID: \"c5c74b25-a885-423b-9383-711519104495\") " Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.192828 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5c74b25-a885-423b-9383-711519104495-kube-api-access-2vkkn" (OuterVolumeSpecName: "kube-api-access-2vkkn") pod "c5c74b25-a885-423b-9383-711519104495" (UID: "c5c74b25-a885-423b-9383-711519104495"). InnerVolumeSpecName "kube-api-access-2vkkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.193347 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c5c74b25-a885-423b-9383-711519104495" (UID: "c5c74b25-a885-423b-9383-711519104495"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.198930 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-scripts" (OuterVolumeSpecName: "scripts") pod "c5c74b25-a885-423b-9383-711519104495" (UID: "c5c74b25-a885-423b-9383-711519104495"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.200009 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c5c74b25-a885-423b-9383-711519104495" (UID: "c5c74b25-a885-423b-9383-711519104495"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.222720 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-config-data" (OuterVolumeSpecName: "config-data") pod "c5c74b25-a885-423b-9383-711519104495" (UID: "c5c74b25-a885-423b-9383-711519104495"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.224621 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5c74b25-a885-423b-9383-711519104495" (UID: "c5c74b25-a885-423b-9383-711519104495"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.285124 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vkkn\" (UniqueName: \"kubernetes.io/projected/c5c74b25-a885-423b-9383-711519104495-kube-api-access-2vkkn\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.285162 4922 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.285174 4922 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.285187 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.285200 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.285211 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5c74b25-a885-423b-9383-711519104495-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.711180 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-plwj5" event={"ID":"c5c74b25-a885-423b-9383-711519104495","Type":"ContainerDied","Data":"2f092bc07c40d5a9be2370dc46b5ea52d5ea24193ebd83707c3a3303499ada19"} Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.711238 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f092bc07c40d5a9be2370dc46b5ea52d5ea24193ebd83707c3a3303499ada19" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.711275 4922 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-plwj5" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.834572 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6c4c7b57f7-q89s8"] Sep 29 23:54:51 crc kubenswrapper[4922]: E0929 23:54:51.834936 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5c74b25-a885-423b-9383-711519104495" containerName="keystone-bootstrap" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.834951 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5c74b25-a885-423b-9383-711519104495" containerName="keystone-bootstrap" Sep 29 23:54:51 crc kubenswrapper[4922]: E0929 23:54:51.834977 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerName="dnsmasq-dns" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.834986 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerName="dnsmasq-dns" Sep 29 23:54:51 crc kubenswrapper[4922]: E0929 23:54:51.835004 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerName="init" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.835012 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerName="init" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.835259 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="43634687-ed85-4d66-807e-77c40c3d4e6a" containerName="dnsmasq-dns" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.835286 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5c74b25-a885-423b-9383-711519104495" containerName="keystone-bootstrap" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.835928 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.838803 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.838819 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.838951 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-xhhkq" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.838818 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.863974 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6c4c7b57f7-q89s8"] Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.997726 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-credential-keys\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.998296 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-config-data\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.998497 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-combined-ca-bundle\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.998687 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzrmp\" (UniqueName: \"kubernetes.io/projected/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-kube-api-access-lzrmp\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.998923 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-fernet-keys\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:51 crc kubenswrapper[4922]: I0929 23:54:51.999141 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-scripts\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.100786 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-scripts\") pod \"keystone-6c4c7b57f7-q89s8\" 
(UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.100880 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-credential-keys\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.100913 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-config-data\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.100932 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-combined-ca-bundle\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.100952 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzrmp\" (UniqueName: \"kubernetes.io/projected/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-kube-api-access-lzrmp\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.100990 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-fernet-keys\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.105237 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-combined-ca-bundle\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.105696 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-credential-keys\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.108800 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-scripts\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.111879 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-fernet-keys\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.112103 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-config-data\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.119463 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzrmp\" (UniqueName: \"kubernetes.io/projected/f66e8e06-9d1b-4a9e-88e7-7f83b5161faa-kube-api-access-lzrmp\") pod \"keystone-6c4c7b57f7-q89s8\" (UID: \"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa\") " pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.165932 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.438310 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43634687-ed85-4d66-807e-77c40c3d4e6a" path="/var/lib/kubelet/pods/43634687-ed85-4d66-807e-77c40c3d4e6a/volumes" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.447573 4922 scope.go:117] "RemoveContainer" containerID="22bb54e8b6b8c1045048b2f06696c872910b1ff91fb2b466c51a4289191254fd" Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.601569 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6c4c7b57f7-q89s8"] Sep 29 23:54:52 crc kubenswrapper[4922]: W0929 23:54:52.609226 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf66e8e06_9d1b_4a9e_88e7_7f83b5161faa.slice/crio-6034f3ae04c37709265cea0f3d3c6299eb709d5a8836aee7655d36cc49d275d9 WatchSource:0}: Error finding container 6034f3ae04c37709265cea0f3d3c6299eb709d5a8836aee7655d36cc49d275d9: Status 404 returned error can't find the container with id 6034f3ae04c37709265cea0f3d3c6299eb709d5a8836aee7655d36cc49d275d9 Sep 29 23:54:52 crc kubenswrapper[4922]: I0929 23:54:52.729339 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c4c7b57f7-q89s8" event={"ID":"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa","Type":"ContainerStarted","Data":"6034f3ae04c37709265cea0f3d3c6299eb709d5a8836aee7655d36cc49d275d9"} Sep 29 23:54:53 crc kubenswrapper[4922]: I0929 23:54:53.744657 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6c4c7b57f7-q89s8" event={"ID":"f66e8e06-9d1b-4a9e-88e7-7f83b5161faa","Type":"ContainerStarted","Data":"7d696090dca7109de75bed9134e75f067193a4b21f740394639b2b976585ce93"} Sep 29 23:54:53 crc kubenswrapper[4922]: I0929 23:54:53.745478 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:54:53 crc kubenswrapper[4922]: I0929 23:54:53.778978 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6c4c7b57f7-q89s8" podStartSLOduration=2.778952967 podStartE2EDuration="2.778952967s" podCreationTimestamp="2025-09-29 23:54:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:54:53.770641963 +0000 UTC m=+5298.080930796" watchObservedRunningTime="2025-09-29 23:54:53.778952967 +0000 UTC m=+5298.089241790" Sep 29 23:54:54 crc kubenswrapper[4922]: I0929 23:54:54.422283 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 
23:54:54 crc kubenswrapper[4922]: E0929 23:54:54.422892 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:55:07 crc kubenswrapper[4922]: I0929 23:55:07.422295 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:55:07 crc kubenswrapper[4922]: E0929 23:55:07.423560 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:55:22 crc kubenswrapper[4922]: I0929 23:55:22.422030 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:55:22 crc kubenswrapper[4922]: E0929 23:55:22.423138 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:55:23 crc kubenswrapper[4922]: I0929 23:55:23.489147 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6c4c7b57f7-q89s8" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.719471 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.723136 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.726795 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-ps6tf" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.727079 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.727220 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.744991 4922 status_manager.go:875] "Failed to update status for pod" pod="openstack/openstackclient" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874c5b11-fdc9-4685-b1df-c1c41abd315e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T23:55:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T23:55:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T23:55:27Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T23:55:27Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"openstackclient\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/clouds.yaml\\\",\\\"name\\\":\\\"openstack-config\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/secure.yaml\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/cloudrc\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6wnns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T23:55:27Z\\\"}}\" for pod \"openstack\"/\"openstackclient\": pods \"openstackclient\" not found" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.747736 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.761864 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 29 23:55:27 crc kubenswrapper[4922]: E0929 23:55:27.762601 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-6wnns openstack-config 
openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="874c5b11-fdc9-4685-b1df-c1c41abd315e" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.769665 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.795489 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.796837 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.798353 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config-secret\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.798737 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wnns\" (UniqueName: \"kubernetes.io/projected/874c5b11-fdc9-4685-b1df-c1c41abd315e-kube-api-access-6wnns\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.798984 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.802668 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="874c5b11-fdc9-4685-b1df-c1c41abd315e" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.807421 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.901502 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wnns\" (UniqueName: \"kubernetes.io/projected/874c5b11-fdc9-4685-b1df-c1c41abd315e-kube-api-access-6wnns\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.901615 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.901706 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config-secret\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.901805 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.901848 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5b4n\" (UniqueName: \"kubernetes.io/projected/40533406-f1f6-4c7a-9377-d8c338936f67-kube-api-access-h5b4n\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.901887 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config-secret\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.905009 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:27 crc kubenswrapper[4922]: E0929 23:55:27.905195 4922 projected.go:194] Error preparing data for projected volume kube-api-access-6wnns for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (874c5b11-fdc9-4685-b1df-c1c41abd315e) does not match the UID in record. The object might have been deleted and then recreated Sep 29 23:55:27 crc kubenswrapper[4922]: E0929 23:55:27.905257 4922 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/874c5b11-fdc9-4685-b1df-c1c41abd315e-kube-api-access-6wnns podName:874c5b11-fdc9-4685-b1df-c1c41abd315e nodeName:}" failed. No retries permitted until 2025-09-29 23:55:28.405238591 +0000 UTC m=+5332.715527404 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-6wnns" (UniqueName: "kubernetes.io/projected/874c5b11-fdc9-4685-b1df-c1c41abd315e-kube-api-access-6wnns") pod "openstackclient" (UID: "874c5b11-fdc9-4685-b1df-c1c41abd315e") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (874c5b11-fdc9-4685-b1df-c1c41abd315e) does not match the UID in record. 
The object might have been deleted and then recreated Sep 29 23:55:27 crc kubenswrapper[4922]: I0929 23:55:27.910690 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config-secret\") pod \"openstackclient\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.003730 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config-secret\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.003800 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.003823 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5b4n\" (UniqueName: \"kubernetes.io/projected/40533406-f1f6-4c7a-9377-d8c338936f67-kube-api-access-h5b4n\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.005149 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.007708 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config-secret\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.021166 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5b4n\" (UniqueName: \"kubernetes.io/projected/40533406-f1f6-4c7a-9377-d8c338936f67-kube-api-access-h5b4n\") pod \"openstackclient\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.095147 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.098509 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="874c5b11-fdc9-4685-b1df-c1c41abd315e" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.105600 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.108361 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="874c5b11-fdc9-4685-b1df-c1c41abd315e" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.114914 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.207560 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config-secret\") pod \"874c5b11-fdc9-4685-b1df-c1c41abd315e\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.207983 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config\") pod \"874c5b11-fdc9-4685-b1df-c1c41abd315e\" (UID: \"874c5b11-fdc9-4685-b1df-c1c41abd315e\") " Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.208562 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wnns\" (UniqueName: \"kubernetes.io/projected/874c5b11-fdc9-4685-b1df-c1c41abd315e-kube-api-access-6wnns\") on node \"crc\" DevicePath \"\"" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.211702 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "874c5b11-fdc9-4685-b1df-c1c41abd315e" (UID: "874c5b11-fdc9-4685-b1df-c1c41abd315e"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.212538 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "874c5b11-fdc9-4685-b1df-c1c41abd315e" (UID: "874c5b11-fdc9-4685-b1df-c1c41abd315e"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.310453 4922 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.310494 4922 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/874c5b11-fdc9-4685-b1df-c1c41abd315e-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.392286 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 23:55:28 crc kubenswrapper[4922]: I0929 23:55:28.455468 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="874c5b11-fdc9-4685-b1df-c1c41abd315e" path="/var/lib/kubelet/pods/874c5b11-fdc9-4685-b1df-c1c41abd315e/volumes" Sep 29 23:55:29 crc kubenswrapper[4922]: I0929 23:55:29.107976 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 23:55:29 crc kubenswrapper[4922]: I0929 23:55:29.108030 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"40533406-f1f6-4c7a-9377-d8c338936f67","Type":"ContainerStarted","Data":"9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418"} Sep 29 23:55:29 crc kubenswrapper[4922]: I0929 23:55:29.108422 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"40533406-f1f6-4c7a-9377-d8c338936f67","Type":"ContainerStarted","Data":"8660f2bd5d32686d203ec2ddacb9c0a5158f0b1b10eb77b496068053f261629e"} Sep 29 23:55:29 crc kubenswrapper[4922]: I0929 23:55:29.137673 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="874c5b11-fdc9-4685-b1df-c1c41abd315e" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" Sep 29 23:55:29 crc kubenswrapper[4922]: I0929 23:55:29.138225 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.138198628 podStartE2EDuration="2.138198628s" podCreationTimestamp="2025-09-29 23:55:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:55:29.130332474 +0000 UTC m=+5333.440621327" watchObservedRunningTime="2025-09-29 23:55:29.138198628 +0000 UTC m=+5333.448487481" Sep 29 23:55:36 crc kubenswrapper[4922]: I0929 23:55:36.426983 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:55:36 crc kubenswrapper[4922]: E0929 23:55:36.427704 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:55:50 crc kubenswrapper[4922]: I0929 23:55:50.423347 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:55:50 crc kubenswrapper[4922]: E0929 23:55:50.424130 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:56:03 crc kubenswrapper[4922]: I0929 23:56:03.421952 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:56:03 crc kubenswrapper[4922]: E0929 23:56:03.422970 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:56:16 crc kubenswrapper[4922]: 
I0929 23:56:16.428172 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:56:16 crc kubenswrapper[4922]: E0929 23:56:16.429961 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:56:29 crc kubenswrapper[4922]: I0929 23:56:29.422759 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:56:29 crc kubenswrapper[4922]: E0929 23:56:29.423527 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:56:40 crc kubenswrapper[4922]: I0929 23:56:40.422656 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:56:40 crc kubenswrapper[4922]: E0929 23:56:40.423645 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:56:51 crc kubenswrapper[4922]: I0929 23:56:51.421220 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:56:51 crc kubenswrapper[4922]: E0929 23:56:51.421924 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:57:04 crc kubenswrapper[4922]: I0929 23:57:04.422738 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:57:04 crc kubenswrapper[4922]: E0929 23:57:04.423800 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.125246 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-7z76j"] Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.126718 4922 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7z76j" Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.138007 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7z76j"] Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.198241 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8xxq\" (UniqueName: \"kubernetes.io/projected/9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a-kube-api-access-q8xxq\") pod \"barbican-db-create-7z76j\" (UID: \"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a\") " pod="openstack/barbican-db-create-7z76j" Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.300033 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8xxq\" (UniqueName: \"kubernetes.io/projected/9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a-kube-api-access-q8xxq\") pod \"barbican-db-create-7z76j\" (UID: \"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a\") " pod="openstack/barbican-db-create-7z76j" Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.336290 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8xxq\" (UniqueName: \"kubernetes.io/projected/9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a-kube-api-access-q8xxq\") pod \"barbican-db-create-7z76j\" (UID: \"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a\") " pod="openstack/barbican-db-create-7z76j" Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.431928 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:57:16 crc kubenswrapper[4922]: E0929 23:57:16.433091 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.444199 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-7z76j" Sep 29 23:57:16 crc kubenswrapper[4922]: I0929 23:57:16.937373 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7z76j"] Sep 29 23:57:16 crc kubenswrapper[4922]: W0929 23:57:16.938834 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9eccfaa2_a65a_4d40_8a7a_73b98f8ab19a.slice/crio-ec150be1280c575561ab7de977c363c0d580420d447e288e5dc734dfa8b4b6e0 WatchSource:0}: Error finding container ec150be1280c575561ab7de977c363c0d580420d447e288e5dc734dfa8b4b6e0: Status 404 returned error can't find the container with id ec150be1280c575561ab7de977c363c0d580420d447e288e5dc734dfa8b4b6e0 Sep 29 23:57:17 crc kubenswrapper[4922]: I0929 23:57:17.248596 4922 generic.go:334] "Generic (PLEG): container finished" podID="9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a" containerID="b9e7a55da4830d4a880476a53ba65d2342983ac45e3bbd020f2bfef78fcd1ba1" exitCode=0 Sep 29 23:57:17 crc kubenswrapper[4922]: I0929 23:57:17.248807 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7z76j" event={"ID":"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a","Type":"ContainerDied","Data":"b9e7a55da4830d4a880476a53ba65d2342983ac45e3bbd020f2bfef78fcd1ba1"} Sep 29 23:57:17 crc kubenswrapper[4922]: I0929 23:57:17.249946 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7z76j" event={"ID":"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a","Type":"ContainerStarted","Data":"ec150be1280c575561ab7de977c363c0d580420d447e288e5dc734dfa8b4b6e0"} Sep 29 23:57:18 crc kubenswrapper[4922]: I0929 23:57:18.686473 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7z76j" Sep 29 23:57:18 crc kubenswrapper[4922]: I0929 23:57:18.747528 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8xxq\" (UniqueName: \"kubernetes.io/projected/9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a-kube-api-access-q8xxq\") pod \"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a\" (UID: \"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a\") " Sep 29 23:57:18 crc kubenswrapper[4922]: I0929 23:57:18.755746 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a-kube-api-access-q8xxq" (OuterVolumeSpecName: "kube-api-access-q8xxq") pod "9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a" (UID: "9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a"). InnerVolumeSpecName "kube-api-access-q8xxq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:57:18 crc kubenswrapper[4922]: I0929 23:57:18.849089 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8xxq\" (UniqueName: \"kubernetes.io/projected/9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a-kube-api-access-q8xxq\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:19 crc kubenswrapper[4922]: I0929 23:57:19.270529 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7z76j" event={"ID":"9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a","Type":"ContainerDied","Data":"ec150be1280c575561ab7de977c363c0d580420d447e288e5dc734dfa8b4b6e0"} Sep 29 23:57:19 crc kubenswrapper[4922]: I0929 23:57:19.270579 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec150be1280c575561ab7de977c363c0d580420d447e288e5dc734dfa8b4b6e0" Sep 29 23:57:19 crc kubenswrapper[4922]: I0929 23:57:19.270635 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7z76j" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.241012 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-cfae-account-create-djnqq"] Sep 29 23:57:26 crc kubenswrapper[4922]: E0929 23:57:26.242221 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a" containerName="mariadb-database-create" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.242245 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a" containerName="mariadb-database-create" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.242558 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a" containerName="mariadb-database-create" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.243224 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cfae-account-create-djnqq" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.245623 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.254292 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cfae-account-create-djnqq"] Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.335023 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-729nh\" (UniqueName: \"kubernetes.io/projected/a7bca374-9fe8-4a69-843c-2a25dfa667e0-kube-api-access-729nh\") pod \"barbican-cfae-account-create-djnqq\" (UID: \"a7bca374-9fe8-4a69-843c-2a25dfa667e0\") " pod="openstack/barbican-cfae-account-create-djnqq" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.441320 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-729nh\" (UniqueName: \"kubernetes.io/projected/a7bca374-9fe8-4a69-843c-2a25dfa667e0-kube-api-access-729nh\") pod \"barbican-cfae-account-create-djnqq\" (UID: \"a7bca374-9fe8-4a69-843c-2a25dfa667e0\") " pod="openstack/barbican-cfae-account-create-djnqq" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.478154 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-729nh\" (UniqueName: \"kubernetes.io/projected/a7bca374-9fe8-4a69-843c-2a25dfa667e0-kube-api-access-729nh\") pod \"barbican-cfae-account-create-djnqq\" (UID: \"a7bca374-9fe8-4a69-843c-2a25dfa667e0\") " pod="openstack/barbican-cfae-account-create-djnqq" Sep 29 23:57:26 crc kubenswrapper[4922]: I0929 23:57:26.573707 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cfae-account-create-djnqq" Sep 29 23:57:27 crc kubenswrapper[4922]: I0929 23:57:27.095967 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-cfae-account-create-djnqq"] Sep 29 23:57:27 crc kubenswrapper[4922]: W0929 23:57:27.103171 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7bca374_9fe8_4a69_843c_2a25dfa667e0.slice/crio-5dcc4fd45b8b1b43c82450e304fb6676c9495143644c97db00b10dcc227df598 WatchSource:0}: Error finding container 5dcc4fd45b8b1b43c82450e304fb6676c9495143644c97db00b10dcc227df598: Status 404 returned error can't find the container with id 5dcc4fd45b8b1b43c82450e304fb6676c9495143644c97db00b10dcc227df598 Sep 29 23:57:27 crc kubenswrapper[4922]: I0929 23:57:27.376450 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cfae-account-create-djnqq" event={"ID":"a7bca374-9fe8-4a69-843c-2a25dfa667e0","Type":"ContainerStarted","Data":"2e9aecf0361f59c55c7dcb26b537d5742f9f50a4d57c8b7b7cfb1d50a9081b84"} Sep 29 23:57:27 crc kubenswrapper[4922]: I0929 23:57:27.376517 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cfae-account-create-djnqq" event={"ID":"a7bca374-9fe8-4a69-843c-2a25dfa667e0","Type":"ContainerStarted","Data":"5dcc4fd45b8b1b43c82450e304fb6676c9495143644c97db00b10dcc227df598"} Sep 29 23:57:27 crc kubenswrapper[4922]: I0929 23:57:27.422328 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:57:27 crc kubenswrapper[4922]: E0929 23:57:27.422675 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:57:28 crc kubenswrapper[4922]: I0929 23:57:28.387762 4922 generic.go:334] "Generic (PLEG): container finished" podID="a7bca374-9fe8-4a69-843c-2a25dfa667e0" containerID="2e9aecf0361f59c55c7dcb26b537d5742f9f50a4d57c8b7b7cfb1d50a9081b84" exitCode=0 Sep 29 23:57:28 crc kubenswrapper[4922]: I0929 23:57:28.387806 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cfae-account-create-djnqq" event={"ID":"a7bca374-9fe8-4a69-843c-2a25dfa667e0","Type":"ContainerDied","Data":"2e9aecf0361f59c55c7dcb26b537d5742f9f50a4d57c8b7b7cfb1d50a9081b84"} Sep 29 23:57:28 crc kubenswrapper[4922]: I0929 23:57:28.825311 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-cfae-account-create-djnqq" Sep 29 23:57:28 crc kubenswrapper[4922]: I0929 23:57:28.889679 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-729nh\" (UniqueName: \"kubernetes.io/projected/a7bca374-9fe8-4a69-843c-2a25dfa667e0-kube-api-access-729nh\") pod \"a7bca374-9fe8-4a69-843c-2a25dfa667e0\" (UID: \"a7bca374-9fe8-4a69-843c-2a25dfa667e0\") " Sep 29 23:57:28 crc kubenswrapper[4922]: I0929 23:57:28.896823 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7bca374-9fe8-4a69-843c-2a25dfa667e0-kube-api-access-729nh" (OuterVolumeSpecName: "kube-api-access-729nh") pod "a7bca374-9fe8-4a69-843c-2a25dfa667e0" (UID: "a7bca374-9fe8-4a69-843c-2a25dfa667e0"). InnerVolumeSpecName "kube-api-access-729nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:57:28 crc kubenswrapper[4922]: I0929 23:57:28.991984 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-729nh\" (UniqueName: \"kubernetes.io/projected/a7bca374-9fe8-4a69-843c-2a25dfa667e0-kube-api-access-729nh\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:29 crc kubenswrapper[4922]: I0929 23:57:29.400886 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-cfae-account-create-djnqq" event={"ID":"a7bca374-9fe8-4a69-843c-2a25dfa667e0","Type":"ContainerDied","Data":"5dcc4fd45b8b1b43c82450e304fb6676c9495143644c97db00b10dcc227df598"} Sep 29 23:57:29 crc kubenswrapper[4922]: I0929 23:57:29.400946 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dcc4fd45b8b1b43c82450e304fb6676c9495143644c97db00b10dcc227df598" Sep 29 23:57:29 crc kubenswrapper[4922]: I0929 23:57:29.400998 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-cfae-account-create-djnqq" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.585807 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-wztdn"] Sep 29 23:57:31 crc kubenswrapper[4922]: E0929 23:57:31.586732 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7bca374-9fe8-4a69-843c-2a25dfa667e0" containerName="mariadb-account-create" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.586753 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7bca374-9fe8-4a69-843c-2a25dfa667e0" containerName="mariadb-account-create" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.586947 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7bca374-9fe8-4a69-843c-2a25dfa667e0" containerName="mariadb-account-create" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.587597 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.592990 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.597494 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ljhqn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.634782 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-wztdn"] Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.643201 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-db-sync-config-data\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.643475 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-combined-ca-bundle\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.643690 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttllc\" (UniqueName: \"kubernetes.io/projected/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-kube-api-access-ttllc\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.745007 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttllc\" (UniqueName: \"kubernetes.io/projected/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-kube-api-access-ttllc\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.745100 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-db-sync-config-data\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.745116 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-combined-ca-bundle\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.751207 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-combined-ca-bundle\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.759853 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-db-sync-config-data\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.785657 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttllc\" (UniqueName: \"kubernetes.io/projected/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-kube-api-access-ttllc\") pod \"barbican-db-sync-wztdn\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:31 crc kubenswrapper[4922]: I0929 23:57:31.920332 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:32 crc kubenswrapper[4922]: W0929 23:57:32.428065 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2dfb3142_3cf3_4586_8e2e_5e5ff9dca842.slice/crio-fc9cfa590fdc1f1062a94468e5a45a75340bc1be4b58cf51a6b2cb3950fc81d7 WatchSource:0}: Error finding container fc9cfa590fdc1f1062a94468e5a45a75340bc1be4b58cf51a6b2cb3950fc81d7: Status 404 returned error can't find the container with id fc9cfa590fdc1f1062a94468e5a45a75340bc1be4b58cf51a6b2cb3950fc81d7 Sep 29 23:57:32 crc kubenswrapper[4922]: I0929 23:57:32.434000 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-wztdn"] Sep 29 23:57:33 crc kubenswrapper[4922]: I0929 23:57:33.442247 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wztdn" event={"ID":"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842","Type":"ContainerStarted","Data":"bde6a7e3ad9037e999cfc576642da753745d42e8f3aaa6a47ac774d5d03916fb"} Sep 29 23:57:33 crc kubenswrapper[4922]: I0929 23:57:33.442739 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wztdn" event={"ID":"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842","Type":"ContainerStarted","Data":"fc9cfa590fdc1f1062a94468e5a45a75340bc1be4b58cf51a6b2cb3950fc81d7"} Sep 29 23:57:33 crc kubenswrapper[4922]: I0929 23:57:33.465414 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-wztdn" podStartSLOduration=2.465354219 podStartE2EDuration="2.465354219s" podCreationTimestamp="2025-09-29 23:57:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:57:33.463420671 +0000 UTC m=+5457.773709524" watchObservedRunningTime="2025-09-29 23:57:33.465354219 +0000 UTC m=+5457.775643062" Sep 29 23:57:34 crc kubenswrapper[4922]: I0929 23:57:34.468233 4922 generic.go:334] "Generic (PLEG): container finished" podID="2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" containerID="bde6a7e3ad9037e999cfc576642da753745d42e8f3aaa6a47ac774d5d03916fb" exitCode=0 Sep 29 23:57:34 crc kubenswrapper[4922]: I0929 23:57:34.468596 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wztdn" event={"ID":"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842","Type":"ContainerDied","Data":"bde6a7e3ad9037e999cfc576642da753745d42e8f3aaa6a47ac774d5d03916fb"} Sep 29 23:57:35 crc kubenswrapper[4922]: I0929 23:57:35.835678 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:35 crc kubenswrapper[4922]: I0929 23:57:35.921829 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-combined-ca-bundle\") pod \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " Sep 29 23:57:35 crc kubenswrapper[4922]: I0929 23:57:35.921916 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-db-sync-config-data\") pod \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " Sep 29 23:57:35 crc kubenswrapper[4922]: I0929 23:57:35.921975 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttllc\" (UniqueName: \"kubernetes.io/projected/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-kube-api-access-ttllc\") pod \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\" (UID: \"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842\") " Sep 29 23:57:35 crc kubenswrapper[4922]: I0929 23:57:35.930627 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" (UID: "2dfb3142-3cf3-4586-8e2e-5e5ff9dca842"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:57:35 crc kubenswrapper[4922]: I0929 23:57:35.930711 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-kube-api-access-ttllc" (OuterVolumeSpecName: "kube-api-access-ttllc") pod "2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" (UID: "2dfb3142-3cf3-4586-8e2e-5e5ff9dca842"). InnerVolumeSpecName "kube-api-access-ttllc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:57:35 crc kubenswrapper[4922]: I0929 23:57:35.956611 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" (UID: "2dfb3142-3cf3-4586-8e2e-5e5ff9dca842"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.024555 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.024605 4922 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.024626 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttllc\" (UniqueName: \"kubernetes.io/projected/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842-kube-api-access-ttllc\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.491485 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-wztdn" event={"ID":"2dfb3142-3cf3-4586-8e2e-5e5ff9dca842","Type":"ContainerDied","Data":"fc9cfa590fdc1f1062a94468e5a45a75340bc1be4b58cf51a6b2cb3950fc81d7"} Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.491535 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc9cfa590fdc1f1062a94468e5a45a75340bc1be4b58cf51a6b2cb3950fc81d7" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.491581 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-wztdn" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.751676 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-55458695f-7vzm8"] Sep 29 23:57:36 crc kubenswrapper[4922]: E0929 23:57:36.752663 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" containerName="barbican-db-sync" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.752698 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" containerName="barbican-db-sync" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.754344 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" containerName="barbican-db-sync" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.756240 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.759512 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.760102 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-ljhqn" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.760404 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.763895 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-55458695f-7vzm8"] Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.811830 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-d76b6d974-tr884"] Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.825124 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.829552 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.829851 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d76b6d974-tr884"] Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.845285 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-config-data\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.845647 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f83b7812-767b-4937-b1d1-2349e58b0ebe-logs\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.845749 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-config-data-custom\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.845854 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xhsn\" (UniqueName: \"kubernetes.io/projected/f83b7812-767b-4937-b1d1-2349e58b0ebe-kube-api-access-5xhsn\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.845921 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-combined-ca-bundle\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.879862 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c78b9f95c-jhhl8"] Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.881607 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.885779 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c78b9f95c-jhhl8"] Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.938869 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-799c46f664-rfhxd"] Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.945149 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.947422 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rxz2\" (UniqueName: \"kubernetes.io/projected/7f40a860-685e-4b57-b9a5-4068206d59c0-kube-api-access-4rxz2\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.947553 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-combined-ca-bundle\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.947658 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-nb\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.947738 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-config-data\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.947812 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8332b0f8-9c00-4c2d-8189-3145bcf70023-logs\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.947898 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f83b7812-767b-4937-b1d1-2349e58b0ebe-logs\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.947979 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-config\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.948055 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-dns-svc\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.952332 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-sb\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.952508 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-config-data-custom\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.952720 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-config-data\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.952824 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xhsn\" (UniqueName: \"kubernetes.io/projected/f83b7812-767b-4937-b1d1-2349e58b0ebe-kube-api-access-5xhsn\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.952915 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-combined-ca-bundle\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.953032 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhqkm\" (UniqueName: \"kubernetes.io/projected/8332b0f8-9c00-4c2d-8189-3145bcf70023-kube-api-access-fhqkm\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.953141 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-config-data-custom\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.950262 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.948448 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f83b7812-767b-4937-b1d1-2349e58b0ebe-logs\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.954051 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-799c46f664-rfhxd"] Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.954455 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-config-data\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.961156 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-config-data-custom\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.965560 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f83b7812-767b-4937-b1d1-2349e58b0ebe-combined-ca-bundle\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:36 crc kubenswrapper[4922]: I0929 23:57:36.975149 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xhsn\" (UniqueName: \"kubernetes.io/projected/f83b7812-767b-4937-b1d1-2349e58b0ebe-kube-api-access-5xhsn\") pod \"barbican-worker-55458695f-7vzm8\" (UID: \"f83b7812-767b-4937-b1d1-2349e58b0ebe\") " pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.054954 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-config-data\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055231 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-config-data\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055373 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-combined-ca-bundle\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055505 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhqkm\" (UniqueName: \"kubernetes.io/projected/8332b0f8-9c00-4c2d-8189-3145bcf70023-kube-api-access-fhqkm\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055587 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-config-data-custom\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 
23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055691 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rxz2\" (UniqueName: \"kubernetes.io/projected/7f40a860-685e-4b57-b9a5-4068206d59c0-kube-api-access-4rxz2\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055780 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-logs\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055855 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-combined-ca-bundle\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.055950 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-nb\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056038 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8332b0f8-9c00-4c2d-8189-3145bcf70023-logs\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056145 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-config\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056246 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-config-data-custom\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056319 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzq9g\" (UniqueName: \"kubernetes.io/projected/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-kube-api-access-jzq9g\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056414 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-dns-svc\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " 
pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056493 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-sb\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056670 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-nb\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.056690 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8332b0f8-9c00-4c2d-8189-3145bcf70023-logs\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.057301 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-config\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.057511 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-sb\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.057583 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-dns-svc\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.059767 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-config-data-custom\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.060172 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-config-data\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.071884 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8332b0f8-9c00-4c2d-8189-3145bcf70023-combined-ca-bundle\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 
23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.071966 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhqkm\" (UniqueName: \"kubernetes.io/projected/8332b0f8-9c00-4c2d-8189-3145bcf70023-kube-api-access-fhqkm\") pod \"barbican-keystone-listener-d76b6d974-tr884\" (UID: \"8332b0f8-9c00-4c2d-8189-3145bcf70023\") " pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.072507 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rxz2\" (UniqueName: \"kubernetes.io/projected/7f40a860-685e-4b57-b9a5-4068206d59c0-kube-api-access-4rxz2\") pod \"dnsmasq-dns-5c78b9f95c-jhhl8\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.082720 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-55458695f-7vzm8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.150055 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d76b6d974-tr884" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.158031 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-config-data\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.158117 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-combined-ca-bundle\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.158184 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-logs\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.158243 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-config-data-custom\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.158263 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzq9g\" (UniqueName: \"kubernetes.io/projected/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-kube-api-access-jzq9g\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.159050 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-logs\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: 
I0929 23:57:37.162620 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-config-data-custom\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.164047 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-config-data\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.166124 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-combined-ca-bundle\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.175253 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzq9g\" (UniqueName: \"kubernetes.io/projected/3e4d84e3-b47d-4b20-9ce7-c8ca1b439159-kube-api-access-jzq9g\") pod \"barbican-api-799c46f664-rfhxd\" (UID: \"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159\") " pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.212632 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.334939 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.522887 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-55458695f-7vzm8"] Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.656233 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d76b6d974-tr884"] Sep 29 23:57:37 crc kubenswrapper[4922]: W0929 23:57:37.669074 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8332b0f8_9c00_4c2d_8189_3145bcf70023.slice/crio-d6dc10172e9c9d6082a6b52420efc310c364ed822df7c67ebff9c17f75aaaf12 WatchSource:0}: Error finding container d6dc10172e9c9d6082a6b52420efc310c364ed822df7c67ebff9c17f75aaaf12: Status 404 returned error can't find the container with id d6dc10172e9c9d6082a6b52420efc310c364ed822df7c67ebff9c17f75aaaf12 Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.745736 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c78b9f95c-jhhl8"] Sep 29 23:57:37 crc kubenswrapper[4922]: W0929 23:57:37.766439 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f40a860_685e_4b57_b9a5_4068206d59c0.slice/crio-d90b871f0e1b7005971cb9c648aa27af6d9b21cc9976c2365b636a8ec3965475 WatchSource:0}: Error finding container d90b871f0e1b7005971cb9c648aa27af6d9b21cc9976c2365b636a8ec3965475: Status 404 returned error can't find the container with id d90b871f0e1b7005971cb9c648aa27af6d9b21cc9976c2365b636a8ec3965475 Sep 29 23:57:37 crc kubenswrapper[4922]: I0929 23:57:37.898131 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-799c46f664-rfhxd"] Sep 29 23:57:37 crc kubenswrapper[4922]: W0929 23:57:37.908055 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e4d84e3_b47d_4b20_9ce7_c8ca1b439159.slice/crio-d966c3210c7f476a0d56c8eb79d44ba73fbfe29026d5c17f664aca094a6e045c WatchSource:0}: Error finding container d966c3210c7f476a0d56c8eb79d44ba73fbfe29026d5c17f664aca094a6e045c: Status 404 returned error can't find the container with id d966c3210c7f476a0d56c8eb79d44ba73fbfe29026d5c17f664aca094a6e045c Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.510618 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d76b6d974-tr884" event={"ID":"8332b0f8-9c00-4c2d-8189-3145bcf70023","Type":"ContainerStarted","Data":"bf6b392bfb02c2b201f7ac1ac08d24b266b80ddaa0df969b04267f71e1685e25"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.511031 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d76b6d974-tr884" event={"ID":"8332b0f8-9c00-4c2d-8189-3145bcf70023","Type":"ContainerStarted","Data":"8f5dbac1f073b8bc4e7f14ae1986ff91da046b2a6f6a667ba14076eac04aa9e8"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.511043 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d76b6d974-tr884" event={"ID":"8332b0f8-9c00-4c2d-8189-3145bcf70023","Type":"ContainerStarted","Data":"d6dc10172e9c9d6082a6b52420efc310c364ed822df7c67ebff9c17f75aaaf12"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.512735 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-55458695f-7vzm8" 
event={"ID":"f83b7812-767b-4937-b1d1-2349e58b0ebe","Type":"ContainerStarted","Data":"acd079a70732248bab5f8adf8fe7e124769e11fe49b9b42bb5906440cfb5b874"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.512759 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-55458695f-7vzm8" event={"ID":"f83b7812-767b-4937-b1d1-2349e58b0ebe","Type":"ContainerStarted","Data":"48a5a4094f2ebea5e3ffc068962e1488be4fa84f8731d6111f669c778953b495"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.512768 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-55458695f-7vzm8" event={"ID":"f83b7812-767b-4937-b1d1-2349e58b0ebe","Type":"ContainerStarted","Data":"c967c19a12997f16cfdd09a6f9de7358608d3f0a152aa5e738b5dbf90a84fef5"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.515082 4922 generic.go:334] "Generic (PLEG): container finished" podID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerID="d2073bd8a1512fd94f2b45f3be80130eb655cd91b394002127b728d174658e83" exitCode=0 Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.515549 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" event={"ID":"7f40a860-685e-4b57-b9a5-4068206d59c0","Type":"ContainerDied","Data":"d2073bd8a1512fd94f2b45f3be80130eb655cd91b394002127b728d174658e83"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.515576 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" event={"ID":"7f40a860-685e-4b57-b9a5-4068206d59c0","Type":"ContainerStarted","Data":"d90b871f0e1b7005971cb9c648aa27af6d9b21cc9976c2365b636a8ec3965475"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.517671 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-799c46f664-rfhxd" event={"ID":"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159","Type":"ContainerStarted","Data":"a3f10ca4f44b1ce003dd19f15a1c8564f271a36220b4ab4d783537fd30d58f8d"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.517782 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-799c46f664-rfhxd" event={"ID":"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159","Type":"ContainerStarted","Data":"ae25bcd50c9bb238e94be92668755c41022792d408c4cfdb602007ee100b556f"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.517844 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-799c46f664-rfhxd" event={"ID":"3e4d84e3-b47d-4b20-9ce7-c8ca1b439159","Type":"ContainerStarted","Data":"d966c3210c7f476a0d56c8eb79d44ba73fbfe29026d5c17f664aca094a6e045c"} Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.517951 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.518063 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.533202 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-d76b6d974-tr884" podStartSLOduration=2.533183273 podStartE2EDuration="2.533183273s" podCreationTimestamp="2025-09-29 23:57:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:57:38.530136638 +0000 UTC m=+5462.840425461" watchObservedRunningTime="2025-09-29 23:57:38.533183273 +0000 UTC 
m=+5462.843472096" Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.554363 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-55458695f-7vzm8" podStartSLOduration=2.554345353 podStartE2EDuration="2.554345353s" podCreationTimestamp="2025-09-29 23:57:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:57:38.548828558 +0000 UTC m=+5462.859117371" watchObservedRunningTime="2025-09-29 23:57:38.554345353 +0000 UTC m=+5462.864634166" Sep 29 23:57:38 crc kubenswrapper[4922]: I0929 23:57:38.572681 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-799c46f664-rfhxd" podStartSLOduration=2.572665364 podStartE2EDuration="2.572665364s" podCreationTimestamp="2025-09-29 23:57:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:57:38.569801324 +0000 UTC m=+5462.880090137" watchObservedRunningTime="2025-09-29 23:57:38.572665364 +0000 UTC m=+5462.882954177" Sep 29 23:57:39 crc kubenswrapper[4922]: I0929 23:57:39.528265 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" event={"ID":"7f40a860-685e-4b57-b9a5-4068206d59c0","Type":"ContainerStarted","Data":"65ea0735a9a64a72bcf6f86bb8a0ac0d889cdc66a87cd47b6232b07abf9f3eb3"} Sep 29 23:57:39 crc kubenswrapper[4922]: I0929 23:57:39.551055 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" podStartSLOduration=3.551038065 podStartE2EDuration="3.551038065s" podCreationTimestamp="2025-09-29 23:57:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:57:39.547001676 +0000 UTC m=+5463.857290809" watchObservedRunningTime="2025-09-29 23:57:39.551038065 +0000 UTC m=+5463.861326878" Sep 29 23:57:40 crc kubenswrapper[4922]: I0929 23:57:40.540889 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:42 crc kubenswrapper[4922]: I0929 23:57:42.423028 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:57:42 crc kubenswrapper[4922]: E0929 23:57:42.423475 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.214558 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.309790 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cbf4f8d45-r8lm9"] Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.310080 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" podUID="d649e447-9a28-4858-8390-f5f05948d127" containerName="dnsmasq-dns" 
containerID="cri-o://e7f868a6a0cf9c0e09ac0ffdbf6acb055837bb6e517536393e38fc4f87555015" gracePeriod=10 Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.632724 4922 generic.go:334] "Generic (PLEG): container finished" podID="d649e447-9a28-4858-8390-f5f05948d127" containerID="e7f868a6a0cf9c0e09ac0ffdbf6acb055837bb6e517536393e38fc4f87555015" exitCode=0 Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.633127 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" event={"ID":"d649e447-9a28-4858-8390-f5f05948d127","Type":"ContainerDied","Data":"e7f868a6a0cf9c0e09ac0ffdbf6acb055837bb6e517536393e38fc4f87555015"} Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.805061 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.975642 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-nb\") pod \"d649e447-9a28-4858-8390-f5f05948d127\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.975692 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdj29\" (UniqueName: \"kubernetes.io/projected/d649e447-9a28-4858-8390-f5f05948d127-kube-api-access-kdj29\") pod \"d649e447-9a28-4858-8390-f5f05948d127\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.975822 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-config\") pod \"d649e447-9a28-4858-8390-f5f05948d127\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.975952 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-dns-svc\") pod \"d649e447-9a28-4858-8390-f5f05948d127\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.976076 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-sb\") pod \"d649e447-9a28-4858-8390-f5f05948d127\" (UID: \"d649e447-9a28-4858-8390-f5f05948d127\") " Sep 29 23:57:47 crc kubenswrapper[4922]: I0929 23:57:47.994866 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d649e447-9a28-4858-8390-f5f05948d127-kube-api-access-kdj29" (OuterVolumeSpecName: "kube-api-access-kdj29") pod "d649e447-9a28-4858-8390-f5f05948d127" (UID: "d649e447-9a28-4858-8390-f5f05948d127"). InnerVolumeSpecName "kube-api-access-kdj29". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.013405 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d649e447-9a28-4858-8390-f5f05948d127" (UID: "d649e447-9a28-4858-8390-f5f05948d127"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.021097 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-config" (OuterVolumeSpecName: "config") pod "d649e447-9a28-4858-8390-f5f05948d127" (UID: "d649e447-9a28-4858-8390-f5f05948d127"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.023323 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d649e447-9a28-4858-8390-f5f05948d127" (UID: "d649e447-9a28-4858-8390-f5f05948d127"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.026284 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d649e447-9a28-4858-8390-f5f05948d127" (UID: "d649e447-9a28-4858-8390-f5f05948d127"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.077579 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.077603 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.077613 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.077624 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdj29\" (UniqueName: \"kubernetes.io/projected/d649e447-9a28-4858-8390-f5f05948d127-kube-api-access-kdj29\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.077634 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d649e447-9a28-4858-8390-f5f05948d127-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.660095 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" event={"ID":"d649e447-9a28-4858-8390-f5f05948d127","Type":"ContainerDied","Data":"e45886f72fc9b015b97e00a57e1383b5210e5455e8965528d82a235203b9533e"} Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.660465 4922 scope.go:117] "RemoveContainer" containerID="e7f868a6a0cf9c0e09ac0ffdbf6acb055837bb6e517536393e38fc4f87555015" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.660232 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5cbf4f8d45-r8lm9" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.689835 4922 scope.go:117] "RemoveContainer" containerID="c5847ddcfb1f9a7fefbaeccbc1de5e859a3ef6057479270e768f943e2007aa82" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.691793 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5cbf4f8d45-r8lm9"] Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.700768 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5cbf4f8d45-r8lm9"] Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.805770 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:48 crc kubenswrapper[4922]: I0929 23:57:48.911704 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-799c46f664-rfhxd" Sep 29 23:57:50 crc kubenswrapper[4922]: I0929 23:57:50.432304 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d649e447-9a28-4858-8390-f5f05948d127" path="/var/lib/kubelet/pods/d649e447-9a28-4858-8390-f5f05948d127/volumes" Sep 29 23:57:57 crc kubenswrapper[4922]: I0929 23:57:57.422553 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:57:57 crc kubenswrapper[4922]: E0929 23:57:57.423972 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.454569 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-c7vkh"] Sep 29 23:58:03 crc kubenswrapper[4922]: E0929 23:58:03.455807 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d649e447-9a28-4858-8390-f5f05948d127" containerName="dnsmasq-dns" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.455831 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d649e447-9a28-4858-8390-f5f05948d127" containerName="dnsmasq-dns" Sep 29 23:58:03 crc kubenswrapper[4922]: E0929 23:58:03.455876 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d649e447-9a28-4858-8390-f5f05948d127" containerName="init" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.455890 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d649e447-9a28-4858-8390-f5f05948d127" containerName="init" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.456157 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d649e447-9a28-4858-8390-f5f05948d127" containerName="dnsmasq-dns" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.457193 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-c7vkh" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.461787 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vc9bw"] Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.463566 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.473464 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-c7vkh"] Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.485366 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc9bw"] Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.594197 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slwfz\" (UniqueName: \"kubernetes.io/projected/70010ad7-7568-440e-9521-19e013b68753-kube-api-access-slwfz\") pod \"neutron-db-create-c7vkh\" (UID: \"70010ad7-7568-440e-9521-19e013b68753\") " pod="openstack/neutron-db-create-c7vkh" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.594277 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-utilities\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.594485 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-catalog-content\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.594563 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvd9f\" (UniqueName: \"kubernetes.io/projected/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-kube-api-access-hvd9f\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.696542 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slwfz\" (UniqueName: \"kubernetes.io/projected/70010ad7-7568-440e-9521-19e013b68753-kube-api-access-slwfz\") pod \"neutron-db-create-c7vkh\" (UID: \"70010ad7-7568-440e-9521-19e013b68753\") " pod="openstack/neutron-db-create-c7vkh" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.696640 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-utilities\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.696788 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-catalog-content\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.696851 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvd9f\" (UniqueName: \"kubernetes.io/projected/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-kube-api-access-hvd9f\") pod \"redhat-marketplace-vc9bw\" (UID: 
\"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.697765 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-utilities\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.697803 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-catalog-content\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.717557 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvd9f\" (UniqueName: \"kubernetes.io/projected/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-kube-api-access-hvd9f\") pod \"redhat-marketplace-vc9bw\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.717589 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slwfz\" (UniqueName: \"kubernetes.io/projected/70010ad7-7568-440e-9521-19e013b68753-kube-api-access-slwfz\") pod \"neutron-db-create-c7vkh\" (UID: \"70010ad7-7568-440e-9521-19e013b68753\") " pod="openstack/neutron-db-create-c7vkh" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.792885 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-c7vkh" Sep 29 23:58:03 crc kubenswrapper[4922]: I0929 23:58:03.806916 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.108680 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-c7vkh"] Sep 29 23:58:04 crc kubenswrapper[4922]: W0929 23:58:04.269618 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc65dc33_c84d_4fb9_88a4_ef4fd112cc27.slice/crio-fc0c573e4af0201d221ed523286c6d737b322e689171d439e06cb36b5bfa86fa WatchSource:0}: Error finding container fc0c573e4af0201d221ed523286c6d737b322e689171d439e06cb36b5bfa86fa: Status 404 returned error can't find the container with id fc0c573e4af0201d221ed523286c6d737b322e689171d439e06cb36b5bfa86fa Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.271149 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc9bw"] Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.828323 4922 generic.go:334] "Generic (PLEG): container finished" podID="70010ad7-7568-440e-9521-19e013b68753" containerID="6813631fc637aecd46f709a581af8bbab2755c6296c396d7309e229fb02ef2ce" exitCode=0 Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.828498 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-c7vkh" event={"ID":"70010ad7-7568-440e-9521-19e013b68753","Type":"ContainerDied","Data":"6813631fc637aecd46f709a581af8bbab2755c6296c396d7309e229fb02ef2ce"} Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.828869 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-c7vkh" event={"ID":"70010ad7-7568-440e-9521-19e013b68753","Type":"ContainerStarted","Data":"0e50196e023a1a0940d1689476ef6c8c0cd6fdbe97455ee9c226b5d855803e27"} Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.831314 4922 generic.go:334] "Generic (PLEG): container finished" podID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerID="08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b" exitCode=0 Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.831370 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc9bw" event={"ID":"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27","Type":"ContainerDied","Data":"08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b"} Sep 29 23:58:04 crc kubenswrapper[4922]: I0929 23:58:04.831428 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc9bw" event={"ID":"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27","Type":"ContainerStarted","Data":"fc0c573e4af0201d221ed523286c6d737b322e689171d439e06cb36b5bfa86fa"} Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.246345 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-c7vkh" Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.347187 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slwfz\" (UniqueName: \"kubernetes.io/projected/70010ad7-7568-440e-9521-19e013b68753-kube-api-access-slwfz\") pod \"70010ad7-7568-440e-9521-19e013b68753\" (UID: \"70010ad7-7568-440e-9521-19e013b68753\") " Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.356831 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70010ad7-7568-440e-9521-19e013b68753-kube-api-access-slwfz" (OuterVolumeSpecName: "kube-api-access-slwfz") pod "70010ad7-7568-440e-9521-19e013b68753" (UID: "70010ad7-7568-440e-9521-19e013b68753"). InnerVolumeSpecName "kube-api-access-slwfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.450447 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slwfz\" (UniqueName: \"kubernetes.io/projected/70010ad7-7568-440e-9521-19e013b68753-kube-api-access-slwfz\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.866434 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-c7vkh" event={"ID":"70010ad7-7568-440e-9521-19e013b68753","Type":"ContainerDied","Data":"0e50196e023a1a0940d1689476ef6c8c0cd6fdbe97455ee9c226b5d855803e27"} Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.866949 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e50196e023a1a0940d1689476ef6c8c0cd6fdbe97455ee9c226b5d855803e27" Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.866552 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-c7vkh" Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.869619 4922 generic.go:334] "Generic (PLEG): container finished" podID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerID="fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405" exitCode=0 Sep 29 23:58:06 crc kubenswrapper[4922]: I0929 23:58:06.869667 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc9bw" event={"ID":"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27","Type":"ContainerDied","Data":"fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405"} Sep 29 23:58:07 crc kubenswrapper[4922]: I0929 23:58:07.885457 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc9bw" event={"ID":"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27","Type":"ContainerStarted","Data":"45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70"} Sep 29 23:58:07 crc kubenswrapper[4922]: I0929 23:58:07.917622 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vc9bw" podStartSLOduration=2.425089287 podStartE2EDuration="4.917596965s" podCreationTimestamp="2025-09-29 23:58:03 +0000 UTC" firstStartedPulling="2025-09-29 23:58:04.835302561 +0000 UTC m=+5489.145591384" lastFinishedPulling="2025-09-29 23:58:07.327810229 +0000 UTC m=+5491.638099062" observedRunningTime="2025-09-29 23:58:07.911931456 +0000 UTC m=+5492.222220279" watchObservedRunningTime="2025-09-29 23:58:07.917596965 +0000 UTC m=+5492.227885798" Sep 29 23:58:12 crc kubenswrapper[4922]: I0929 23:58:12.422559 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:58:12 crc kubenswrapper[4922]: E0929 23:58:12.423342 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.562637 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-e944-account-create-t878l"] Sep 29 23:58:13 crc kubenswrapper[4922]: E0929 23:58:13.564446 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70010ad7-7568-440e-9521-19e013b68753" containerName="mariadb-database-create" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.564611 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="70010ad7-7568-440e-9521-19e013b68753" containerName="mariadb-database-create" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.565046 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="70010ad7-7568-440e-9521-19e013b68753" containerName="mariadb-database-create" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.566186 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e944-account-create-t878l" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.597286 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.615535 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e944-account-create-t878l"] Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.696336 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82wsf\" (UniqueName: \"kubernetes.io/projected/b352964c-1637-400a-b4d5-5c4ff1bdb4a4-kube-api-access-82wsf\") pod \"neutron-e944-account-create-t878l\" (UID: \"b352964c-1637-400a-b4d5-5c4ff1bdb4a4\") " pod="openstack/neutron-e944-account-create-t878l" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.798797 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82wsf\" (UniqueName: \"kubernetes.io/projected/b352964c-1637-400a-b4d5-5c4ff1bdb4a4-kube-api-access-82wsf\") pod \"neutron-e944-account-create-t878l\" (UID: \"b352964c-1637-400a-b4d5-5c4ff1bdb4a4\") " pod="openstack/neutron-e944-account-create-t878l" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.807684 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.809309 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.832197 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82wsf\" (UniqueName: \"kubernetes.io/projected/b352964c-1637-400a-b4d5-5c4ff1bdb4a4-kube-api-access-82wsf\") pod \"neutron-e944-account-create-t878l\" (UID: \"b352964c-1637-400a-b4d5-5c4ff1bdb4a4\") " pod="openstack/neutron-e944-account-create-t878l" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.880915 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:13 crc kubenswrapper[4922]: I0929 23:58:13.934339 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-e944-account-create-t878l" Sep 29 23:58:14 crc kubenswrapper[4922]: I0929 23:58:14.048158 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:14 crc kubenswrapper[4922]: I0929 23:58:14.130011 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc9bw"] Sep 29 23:58:14 crc kubenswrapper[4922]: I0929 23:58:14.489601 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-e944-account-create-t878l"] Sep 29 23:58:14 crc kubenswrapper[4922]: I0929 23:58:14.969785 4922 generic.go:334] "Generic (PLEG): container finished" podID="b352964c-1637-400a-b4d5-5c4ff1bdb4a4" containerID="033fb2006310cf3a957f70939d53a0e4dc5ae89cf462d5ff331e6fdcb52f6110" exitCode=0 Sep 29 23:58:14 crc kubenswrapper[4922]: I0929 23:58:14.971570 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e944-account-create-t878l" event={"ID":"b352964c-1637-400a-b4d5-5c4ff1bdb4a4","Type":"ContainerDied","Data":"033fb2006310cf3a957f70939d53a0e4dc5ae89cf462d5ff331e6fdcb52f6110"} Sep 29 23:58:14 crc kubenswrapper[4922]: I0929 23:58:14.971612 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e944-account-create-t878l" event={"ID":"b352964c-1637-400a-b4d5-5c4ff1bdb4a4","Type":"ContainerStarted","Data":"01f6248fbc28b9b1ea9792b079a2518d37219bac57cc4f0752ae7434b4d87812"} Sep 29 23:58:15 crc kubenswrapper[4922]: I0929 23:58:15.980522 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vc9bw" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="registry-server" containerID="cri-o://45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70" gracePeriod=2 Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.494301 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e944-account-create-t878l" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.509758 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.654851 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82wsf\" (UniqueName: \"kubernetes.io/projected/b352964c-1637-400a-b4d5-5c4ff1bdb4a4-kube-api-access-82wsf\") pod \"b352964c-1637-400a-b4d5-5c4ff1bdb4a4\" (UID: \"b352964c-1637-400a-b4d5-5c4ff1bdb4a4\") " Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.655617 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvd9f\" (UniqueName: \"kubernetes.io/projected/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-kube-api-access-hvd9f\") pod \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.655844 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-utilities\") pod \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.655979 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-catalog-content\") pod \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\" (UID: \"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27\") " Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.661029 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b352964c-1637-400a-b4d5-5c4ff1bdb4a4-kube-api-access-82wsf" (OuterVolumeSpecName: "kube-api-access-82wsf") pod "b352964c-1637-400a-b4d5-5c4ff1bdb4a4" (UID: "b352964c-1637-400a-b4d5-5c4ff1bdb4a4"). InnerVolumeSpecName "kube-api-access-82wsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.661113 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-utilities" (OuterVolumeSpecName: "utilities") pod "fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" (UID: "fc65dc33-c84d-4fb9-88a4-ef4fd112cc27"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.661138 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-kube-api-access-hvd9f" (OuterVolumeSpecName: "kube-api-access-hvd9f") pod "fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" (UID: "fc65dc33-c84d-4fb9-88a4-ef4fd112cc27"). InnerVolumeSpecName "kube-api-access-hvd9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.675823 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" (UID: "fc65dc33-c84d-4fb9-88a4-ef4fd112cc27"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.758294 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.758329 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82wsf\" (UniqueName: \"kubernetes.io/projected/b352964c-1637-400a-b4d5-5c4ff1bdb4a4-kube-api-access-82wsf\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.758338 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvd9f\" (UniqueName: \"kubernetes.io/projected/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-kube-api-access-hvd9f\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.758347 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.992142 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-e944-account-create-t878l" event={"ID":"b352964c-1637-400a-b4d5-5c4ff1bdb4a4","Type":"ContainerDied","Data":"01f6248fbc28b9b1ea9792b079a2518d37219bac57cc4f0752ae7434b4d87812"} Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.992226 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01f6248fbc28b9b1ea9792b079a2518d37219bac57cc4f0752ae7434b4d87812" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.992185 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-e944-account-create-t878l" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.994957 4922 generic.go:334] "Generic (PLEG): container finished" podID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerID="45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70" exitCode=0 Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.995023 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc9bw" event={"ID":"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27","Type":"ContainerDied","Data":"45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70"} Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.995064 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vc9bw" event={"ID":"fc65dc33-c84d-4fb9-88a4-ef4fd112cc27","Type":"ContainerDied","Data":"fc0c573e4af0201d221ed523286c6d737b322e689171d439e06cb36b5bfa86fa"} Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.995092 4922 scope.go:117] "RemoveContainer" containerID="45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70" Sep 29 23:58:16 crc kubenswrapper[4922]: I0929 23:58:16.995302 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vc9bw" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.024615 4922 scope.go:117] "RemoveContainer" containerID="fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.058673 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc9bw"] Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.067575 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vc9bw"] Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.074873 4922 scope.go:117] "RemoveContainer" containerID="08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.100167 4922 scope.go:117] "RemoveContainer" containerID="45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70" Sep 29 23:58:17 crc kubenswrapper[4922]: E0929 23:58:17.100953 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70\": container with ID starting with 45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70 not found: ID does not exist" containerID="45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.101002 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70"} err="failed to get container status \"45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70\": rpc error: code = NotFound desc = could not find container \"45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70\": container with ID starting with 45dd8e90bc172700baab6911d0a08983fc4704b8c6c2a489181e4c2f9b26cd70 not found: ID does not exist" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.101035 4922 scope.go:117] "RemoveContainer" containerID="fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405" Sep 29 23:58:17 crc kubenswrapper[4922]: E0929 23:58:17.103035 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405\": container with ID starting with fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405 not found: ID does not exist" containerID="fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.103091 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405"} err="failed to get container status \"fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405\": rpc error: code = NotFound desc = could not find container \"fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405\": container with ID starting with fdfb3237008abe8e2862ad3a46a661716bb4b9e9cb0081453f61c5d630142405 not found: ID does not exist" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.103109 4922 scope.go:117] "RemoveContainer" containerID="08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b" Sep 29 23:58:17 crc kubenswrapper[4922]: E0929 23:58:17.103437 4922 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b\": container with ID starting with 08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b not found: ID does not exist" containerID="08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b" Sep 29 23:58:17 crc kubenswrapper[4922]: I0929 23:58:17.103461 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b"} err="failed to get container status \"08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b\": rpc error: code = NotFound desc = could not find container \"08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b\": container with ID starting with 08cf0bc3305bea079187f519765710095ad4e563c7ace683179fb84bd94b735b not found: ID does not exist" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.445073 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" path="/var/lib/kubelet/pods/fc65dc33-c84d-4fb9-88a4-ef4fd112cc27/volumes" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.823466 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-kntp5"] Sep 29 23:58:18 crc kubenswrapper[4922]: E0929 23:58:18.824081 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="extract-content" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.824113 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="extract-content" Sep 29 23:58:18 crc kubenswrapper[4922]: E0929 23:58:18.824146 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="extract-utilities" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.824160 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="extract-utilities" Sep 29 23:58:18 crc kubenswrapper[4922]: E0929 23:58:18.824188 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="registry-server" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.824202 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="registry-server" Sep 29 23:58:18 crc kubenswrapper[4922]: E0929 23:58:18.824248 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b352964c-1637-400a-b4d5-5c4ff1bdb4a4" containerName="mariadb-account-create" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.824262 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b352964c-1637-400a-b4d5-5c4ff1bdb4a4" containerName="mariadb-account-create" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.824603 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b352964c-1637-400a-b4d5-5c4ff1bdb4a4" containerName="mariadb-account-create" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.824661 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc65dc33-c84d-4fb9-88a4-ef4fd112cc27" containerName="registry-server" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.825698 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.835967 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kntp5"] Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.862491 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-x9cvq" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.862754 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.862867 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.903407 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-combined-ca-bundle\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.903500 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-config\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:18 crc kubenswrapper[4922]: I0929 23:58:18.903627 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-576tn\" (UniqueName: \"kubernetes.io/projected/9dc028b1-0993-4f86-a56f-d2f2043fc999-kube-api-access-576tn\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.004696 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-combined-ca-bundle\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.004770 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-config\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.004808 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-576tn\" (UniqueName: \"kubernetes.io/projected/9dc028b1-0993-4f86-a56f-d2f2043fc999-kube-api-access-576tn\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.010505 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-combined-ca-bundle\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.019132 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-config\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.031804 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-576tn\" (UniqueName: \"kubernetes.io/projected/9dc028b1-0993-4f86-a56f-d2f2043fc999-kube-api-access-576tn\") pod \"neutron-db-sync-kntp5\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.192128 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:19 crc kubenswrapper[4922]: I0929 23:58:19.682134 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-kntp5"] Sep 29 23:58:19 crc kubenswrapper[4922]: W0929 23:58:19.687784 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9dc028b1_0993_4f86_a56f_d2f2043fc999.slice/crio-a741e9e763a743abef8b94d46c8a774edd2108a8ea06d202c812021c0977827a WatchSource:0}: Error finding container a741e9e763a743abef8b94d46c8a774edd2108a8ea06d202c812021c0977827a: Status 404 returned error can't find the container with id a741e9e763a743abef8b94d46c8a774edd2108a8ea06d202c812021c0977827a Sep 29 23:58:20 crc kubenswrapper[4922]: I0929 23:58:20.028984 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kntp5" event={"ID":"9dc028b1-0993-4f86-a56f-d2f2043fc999","Type":"ContainerStarted","Data":"1ac76f79b13f78a2c1fee8b2b222d54e1edd1598b39125b838a73bca6494dec5"} Sep 29 23:58:20 crc kubenswrapper[4922]: I0929 23:58:20.029061 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kntp5" event={"ID":"9dc028b1-0993-4f86-a56f-d2f2043fc999","Type":"ContainerStarted","Data":"a741e9e763a743abef8b94d46c8a774edd2108a8ea06d202c812021c0977827a"} Sep 29 23:58:24 crc kubenswrapper[4922]: I0929 23:58:24.080065 4922 generic.go:334] "Generic (PLEG): container finished" podID="9dc028b1-0993-4f86-a56f-d2f2043fc999" containerID="1ac76f79b13f78a2c1fee8b2b222d54e1edd1598b39125b838a73bca6494dec5" exitCode=0 Sep 29 23:58:24 crc kubenswrapper[4922]: I0929 23:58:24.080266 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kntp5" event={"ID":"9dc028b1-0993-4f86-a56f-d2f2043fc999","Type":"ContainerDied","Data":"1ac76f79b13f78a2c1fee8b2b222d54e1edd1598b39125b838a73bca6494dec5"} Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.498242 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.648291 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-combined-ca-bundle\") pod \"9dc028b1-0993-4f86-a56f-d2f2043fc999\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.648435 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-576tn\" (UniqueName: \"kubernetes.io/projected/9dc028b1-0993-4f86-a56f-d2f2043fc999-kube-api-access-576tn\") pod \"9dc028b1-0993-4f86-a56f-d2f2043fc999\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.648460 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-config\") pod \"9dc028b1-0993-4f86-a56f-d2f2043fc999\" (UID: \"9dc028b1-0993-4f86-a56f-d2f2043fc999\") " Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.654570 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc028b1-0993-4f86-a56f-d2f2043fc999-kube-api-access-576tn" (OuterVolumeSpecName: "kube-api-access-576tn") pod "9dc028b1-0993-4f86-a56f-d2f2043fc999" (UID: "9dc028b1-0993-4f86-a56f-d2f2043fc999"). InnerVolumeSpecName "kube-api-access-576tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.675693 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-config" (OuterVolumeSpecName: "config") pod "9dc028b1-0993-4f86-a56f-d2f2043fc999" (UID: "9dc028b1-0993-4f86-a56f-d2f2043fc999"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.689587 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9dc028b1-0993-4f86-a56f-d2f2043fc999" (UID: "9dc028b1-0993-4f86-a56f-d2f2043fc999"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.750449 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-576tn\" (UniqueName: \"kubernetes.io/projected/9dc028b1-0993-4f86-a56f-d2f2043fc999-kube-api-access-576tn\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.750494 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:25 crc kubenswrapper[4922]: I0929 23:58:25.750515 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dc028b1-0993-4f86-a56f-d2f2043fc999-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.108449 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-kntp5" event={"ID":"9dc028b1-0993-4f86-a56f-d2f2043fc999","Type":"ContainerDied","Data":"a741e9e763a743abef8b94d46c8a774edd2108a8ea06d202c812021c0977827a"} Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.108995 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a741e9e763a743abef8b94d46c8a774edd2108a8ea06d202c812021c0977827a" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.108588 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-kntp5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.412541 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-69557444b9-qbrx5"] Sep 29 23:58:26 crc kubenswrapper[4922]: E0929 23:58:26.412969 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dc028b1-0993-4f86-a56f-d2f2043fc999" containerName="neutron-db-sync" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.412988 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dc028b1-0993-4f86-a56f-d2f2043fc999" containerName="neutron-db-sync" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.413203 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dc028b1-0993-4f86-a56f-d2f2043fc999" containerName="neutron-db-sync" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.414324 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.436876 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69557444b9-qbrx5"] Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.447874 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-c95485957-cs59k"] Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.450904 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.454130 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-x9cvq" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.454383 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.457482 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.467155 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c95485957-cs59k"] Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.565828 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-httpd-config\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.566074 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-config\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.566271 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-config\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.566379 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-dns-svc\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.566462 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25std\" (UniqueName: \"kubernetes.io/projected/852f7c8b-f89d-4799-927b-0168b1352db8-kube-api-access-25std\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.566490 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-sb\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.566552 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-nb\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc 
kubenswrapper[4922]: I0929 23:58:26.566663 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-combined-ca-bundle\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.566715 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dvcr\" (UniqueName: \"kubernetes.io/projected/45f42243-1e24-462d-a7a4-5f4ce2ae749d-kube-api-access-6dvcr\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668676 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-config\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668768 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-config\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668803 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-dns-svc\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668826 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25std\" (UniqueName: \"kubernetes.io/projected/852f7c8b-f89d-4799-927b-0168b1352db8-kube-api-access-25std\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668848 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-sb\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668868 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-nb\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668907 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-combined-ca-bundle\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668941 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dvcr\" (UniqueName: \"kubernetes.io/projected/45f42243-1e24-462d-a7a4-5f4ce2ae749d-kube-api-access-6dvcr\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.668972 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-httpd-config\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.669613 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-dns-svc\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.669989 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-config\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.670058 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-nb\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.670293 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-sb\") pod \"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.672561 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-httpd-config\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.672658 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-config\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.672868 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45f42243-1e24-462d-a7a4-5f4ce2ae749d-combined-ca-bundle\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.688072 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25std\" (UniqueName: \"kubernetes.io/projected/852f7c8b-f89d-4799-927b-0168b1352db8-kube-api-access-25std\") pod 
\"dnsmasq-dns-69557444b9-qbrx5\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.691752 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dvcr\" (UniqueName: \"kubernetes.io/projected/45f42243-1e24-462d-a7a4-5f4ce2ae749d-kube-api-access-6dvcr\") pod \"neutron-c95485957-cs59k\" (UID: \"45f42243-1e24-462d-a7a4-5f4ce2ae749d\") " pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.743237 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:26 crc kubenswrapper[4922]: I0929 23:58:26.783980 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:27 crc kubenswrapper[4922]: I0929 23:58:27.295831 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-69557444b9-qbrx5"] Sep 29 23:58:27 crc kubenswrapper[4922]: I0929 23:58:27.422536 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:58:27 crc kubenswrapper[4922]: E0929 23:58:27.422720 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:58:27 crc kubenswrapper[4922]: I0929 23:58:27.487379 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c95485957-cs59k"] Sep 29 23:58:27 crc kubenswrapper[4922]: W0929 23:58:27.488559 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45f42243_1e24_462d_a7a4_5f4ce2ae749d.slice/crio-569b3999c6140bc8448b90df45997d8fd25e0dc6c54e2f0ed8394eb65918c11c WatchSource:0}: Error finding container 569b3999c6140bc8448b90df45997d8fd25e0dc6c54e2f0ed8394eb65918c11c: Status 404 returned error can't find the container with id 569b3999c6140bc8448b90df45997d8fd25e0dc6c54e2f0ed8394eb65918c11c Sep 29 23:58:28 crc kubenswrapper[4922]: I0929 23:58:28.123955 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c95485957-cs59k" event={"ID":"45f42243-1e24-462d-a7a4-5f4ce2ae749d","Type":"ContainerStarted","Data":"d178c7508161e483f325763aaa300814219db9d47eb50bb3322340cbe69ae6cb"} Sep 29 23:58:28 crc kubenswrapper[4922]: I0929 23:58:28.124290 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c95485957-cs59k" event={"ID":"45f42243-1e24-462d-a7a4-5f4ce2ae749d","Type":"ContainerStarted","Data":"dc540c30d761e003524d638f05575d1826b12aea07435582ad9f7096e75bfecb"} Sep 29 23:58:28 crc kubenswrapper[4922]: I0929 23:58:28.124302 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c95485957-cs59k" event={"ID":"45f42243-1e24-462d-a7a4-5f4ce2ae749d","Type":"ContainerStarted","Data":"569b3999c6140bc8448b90df45997d8fd25e0dc6c54e2f0ed8394eb65918c11c"} Sep 29 23:58:28 crc kubenswrapper[4922]: I0929 23:58:28.124320 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-c95485957-cs59k" Sep 29 23:58:28 crc kubenswrapper[4922]: 
I0929 23:58:28.126289 4922 generic.go:334] "Generic (PLEG): container finished" podID="852f7c8b-f89d-4799-927b-0168b1352db8" containerID="8040fc5934ead7cb92dc7291c29a76d5267fc54e73b2722e800215ac38ab4554" exitCode=0 Sep 29 23:58:28 crc kubenswrapper[4922]: I0929 23:58:28.126335 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" event={"ID":"852f7c8b-f89d-4799-927b-0168b1352db8","Type":"ContainerDied","Data":"8040fc5934ead7cb92dc7291c29a76d5267fc54e73b2722e800215ac38ab4554"} Sep 29 23:58:28 crc kubenswrapper[4922]: I0929 23:58:28.126363 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" event={"ID":"852f7c8b-f89d-4799-927b-0168b1352db8","Type":"ContainerStarted","Data":"5f181106b9e7fbed4c87a57e7dd9e8b99b3624c68ec38eea070497659bbd90db"} Sep 29 23:58:28 crc kubenswrapper[4922]: I0929 23:58:28.164427 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-c95485957-cs59k" podStartSLOduration=2.164405935 podStartE2EDuration="2.164405935s" podCreationTimestamp="2025-09-29 23:58:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:58:28.148279658 +0000 UTC m=+5512.458568511" watchObservedRunningTime="2025-09-29 23:58:28.164405935 +0000 UTC m=+5512.474694748" Sep 29 23:58:29 crc kubenswrapper[4922]: I0929 23:58:29.146238 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" event={"ID":"852f7c8b-f89d-4799-927b-0168b1352db8","Type":"ContainerStarted","Data":"177dd394c33d0a1e1a92c7340672e0cffb044c590405abdba16aafdee8319958"} Sep 29 23:58:29 crc kubenswrapper[4922]: I0929 23:58:29.146772 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:29 crc kubenswrapper[4922]: I0929 23:58:29.175546 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" podStartSLOduration=3.175525582 podStartE2EDuration="3.175525582s" podCreationTimestamp="2025-09-29 23:58:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:58:29.171744509 +0000 UTC m=+5513.482033342" watchObservedRunningTime="2025-09-29 23:58:29.175525582 +0000 UTC m=+5513.485814405" Sep 29 23:58:36 crc kubenswrapper[4922]: I0929 23:58:36.745379 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:58:36 crc kubenswrapper[4922]: I0929 23:58:36.808882 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c78b9f95c-jhhl8"] Sep 29 23:58:36 crc kubenswrapper[4922]: I0929 23:58:36.809182 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerName="dnsmasq-dns" containerID="cri-o://65ea0735a9a64a72bcf6f86bb8a0ac0d889cdc66a87cd47b6232b07abf9f3eb3" gracePeriod=10 Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.229598 4922 generic.go:334] "Generic (PLEG): container finished" podID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerID="65ea0735a9a64a72bcf6f86bb8a0ac0d889cdc66a87cd47b6232b07abf9f3eb3" exitCode=0 Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.229954 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" event={"ID":"7f40a860-685e-4b57-b9a5-4068206d59c0","Type":"ContainerDied","Data":"65ea0735a9a64a72bcf6f86bb8a0ac0d889cdc66a87cd47b6232b07abf9f3eb3"} Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.321528 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.373319 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rxz2\" (UniqueName: \"kubernetes.io/projected/7f40a860-685e-4b57-b9a5-4068206d59c0-kube-api-access-4rxz2\") pod \"7f40a860-685e-4b57-b9a5-4068206d59c0\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.373383 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-sb\") pod \"7f40a860-685e-4b57-b9a5-4068206d59c0\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.373451 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-dns-svc\") pod \"7f40a860-685e-4b57-b9a5-4068206d59c0\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.373497 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-config\") pod \"7f40a860-685e-4b57-b9a5-4068206d59c0\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.373611 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-nb\") pod \"7f40a860-685e-4b57-b9a5-4068206d59c0\" (UID: \"7f40a860-685e-4b57-b9a5-4068206d59c0\") " Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.381436 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f40a860-685e-4b57-b9a5-4068206d59c0-kube-api-access-4rxz2" (OuterVolumeSpecName: "kube-api-access-4rxz2") pod "7f40a860-685e-4b57-b9a5-4068206d59c0" (UID: "7f40a860-685e-4b57-b9a5-4068206d59c0"). InnerVolumeSpecName "kube-api-access-4rxz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.422285 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7f40a860-685e-4b57-b9a5-4068206d59c0" (UID: "7f40a860-685e-4b57-b9a5-4068206d59c0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.435431 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7f40a860-685e-4b57-b9a5-4068206d59c0" (UID: "7f40a860-685e-4b57-b9a5-4068206d59c0"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.441502 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-config" (OuterVolumeSpecName: "config") pod "7f40a860-685e-4b57-b9a5-4068206d59c0" (UID: "7f40a860-685e-4b57-b9a5-4068206d59c0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.458157 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7f40a860-685e-4b57-b9a5-4068206d59c0" (UID: "7f40a860-685e-4b57-b9a5-4068206d59c0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.475847 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.475890 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.475906 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.475918 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7f40a860-685e-4b57-b9a5-4068206d59c0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:37 crc kubenswrapper[4922]: I0929 23:58:37.475932 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rxz2\" (UniqueName: \"kubernetes.io/projected/7f40a860-685e-4b57-b9a5-4068206d59c0-kube-api-access-4rxz2\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.242223 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" event={"ID":"7f40a860-685e-4b57-b9a5-4068206d59c0","Type":"ContainerDied","Data":"d90b871f0e1b7005971cb9c648aa27af6d9b21cc9976c2365b636a8ec3965475"} Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.242284 4922 scope.go:117] "RemoveContainer" containerID="65ea0735a9a64a72bcf6f86bb8a0ac0d889cdc66a87cd47b6232b07abf9f3eb3" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.242357 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.264577 4922 scope.go:117] "RemoveContainer" containerID="d2073bd8a1512fd94f2b45f3be80130eb655cd91b394002127b728d174658e83" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.360328 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rtpcf"] Sep 29 23:58:38 crc kubenswrapper[4922]: E0929 23:58:38.360643 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerName="dnsmasq-dns" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.360659 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerName="dnsmasq-dns" Sep 29 23:58:38 crc kubenswrapper[4922]: E0929 23:58:38.360696 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerName="init" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.360702 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerName="init" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.360849 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerName="dnsmasq-dns" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.362022 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.373647 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rtpcf"] Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.384198 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c78b9f95c-jhhl8"] Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.390540 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c78b9f95c-jhhl8"] Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.431660 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" path="/var/lib/kubelet/pods/7f40a860-685e-4b57-b9a5-4068206d59c0/volumes" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.493183 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldxmr\" (UniqueName: \"kubernetes.io/projected/5685cfce-aa03-472c-bf0c-ff5dd631a559-kube-api-access-ldxmr\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.493251 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-catalog-content\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.493322 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-utilities\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " 
pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.509589 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4w9ss"] Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.511416 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.516219 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4w9ss"] Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.594527 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-utilities\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.594604 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-utilities\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.594679 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-catalog-content\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.594736 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldxmr\" (UniqueName: \"kubernetes.io/projected/5685cfce-aa03-472c-bf0c-ff5dd631a559-kube-api-access-ldxmr\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.594773 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x48fh\" (UniqueName: \"kubernetes.io/projected/a8fb4334-30b8-4fae-a170-463ea8c0727d-kube-api-access-x48fh\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.594808 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-catalog-content\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.595023 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-utilities\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.595183 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-catalog-content\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.614578 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldxmr\" (UniqueName: \"kubernetes.io/projected/5685cfce-aa03-472c-bf0c-ff5dd631a559-kube-api-access-ldxmr\") pod \"community-operators-rtpcf\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.686722 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.696532 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-catalog-content\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.696738 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x48fh\" (UniqueName: \"kubernetes.io/projected/a8fb4334-30b8-4fae-a170-463ea8c0727d-kube-api-access-x48fh\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.696887 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-utilities\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.697027 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-catalog-content\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.697209 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-utilities\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.715175 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x48fh\" (UniqueName: \"kubernetes.io/projected/a8fb4334-30b8-4fae-a170-463ea8c0727d-kube-api-access-x48fh\") pod \"certified-operators-4w9ss\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:38 crc kubenswrapper[4922]: I0929 23:58:38.825141 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:39 crc kubenswrapper[4922]: I0929 23:58:39.005206 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rtpcf"] Sep 29 23:58:39 crc kubenswrapper[4922]: I0929 23:58:39.056990 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4w9ss"] Sep 29 23:58:39 crc kubenswrapper[4922]: I0929 23:58:39.250549 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4w9ss" event={"ID":"a8fb4334-30b8-4fae-a170-463ea8c0727d","Type":"ContainerStarted","Data":"1d535240621224fe2f935d19464d8eb16c9985aa26e1a5620223d65aa2bd9dd3"} Sep 29 23:58:39 crc kubenswrapper[4922]: I0929 23:58:39.264512 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtpcf" event={"ID":"5685cfce-aa03-472c-bf0c-ff5dd631a559","Type":"ContainerStarted","Data":"7cb026995ca0c43684639b3f50473067a329cd50a20b32d09c411b755d27b56e"} Sep 29 23:58:40 crc kubenswrapper[4922]: I0929 23:58:40.278864 4922 generic.go:334] "Generic (PLEG): container finished" podID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerID="b214cd2d2834e45a4bfd48fdab338f7686b2ac9e612f30fd43aa404c70f464ea" exitCode=0 Sep 29 23:58:40 crc kubenswrapper[4922]: I0929 23:58:40.279049 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4w9ss" event={"ID":"a8fb4334-30b8-4fae-a170-463ea8c0727d","Type":"ContainerDied","Data":"b214cd2d2834e45a4bfd48fdab338f7686b2ac9e612f30fd43aa404c70f464ea"} Sep 29 23:58:40 crc kubenswrapper[4922]: I0929 23:58:40.285384 4922 generic.go:334] "Generic (PLEG): container finished" podID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerID="59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40" exitCode=0 Sep 29 23:58:40 crc kubenswrapper[4922]: I0929 23:58:40.285476 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtpcf" event={"ID":"5685cfce-aa03-472c-bf0c-ff5dd631a559","Type":"ContainerDied","Data":"59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40"} Sep 29 23:58:41 crc kubenswrapper[4922]: I0929 23:58:41.299465 4922 generic.go:334] "Generic (PLEG): container finished" podID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerID="73b4994a0ae7b420263fdc055994f47debe62dd290c18fb43144ffecd9953566" exitCode=0 Sep 29 23:58:41 crc kubenswrapper[4922]: I0929 23:58:41.299653 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4w9ss" event={"ID":"a8fb4334-30b8-4fae-a170-463ea8c0727d","Type":"ContainerDied","Data":"73b4994a0ae7b420263fdc055994f47debe62dd290c18fb43144ffecd9953566"} Sep 29 23:58:41 crc kubenswrapper[4922]: I0929 23:58:41.422318 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:58:41 crc kubenswrapper[4922]: E0929 23:58:41.422772 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:58:42 crc kubenswrapper[4922]: I0929 23:58:42.215844 4922 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c78b9f95c-jhhl8" podUID="7f40a860-685e-4b57-b9a5-4068206d59c0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.25:5353: i/o timeout" Sep 29 23:58:42 crc kubenswrapper[4922]: I0929 23:58:42.312999 4922 generic.go:334] "Generic (PLEG): container finished" podID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerID="3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818" exitCode=0 Sep 29 23:58:42 crc kubenswrapper[4922]: I0929 23:58:42.313161 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtpcf" event={"ID":"5685cfce-aa03-472c-bf0c-ff5dd631a559","Type":"ContainerDied","Data":"3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818"} Sep 29 23:58:42 crc kubenswrapper[4922]: I0929 23:58:42.316610 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4w9ss" event={"ID":"a8fb4334-30b8-4fae-a170-463ea8c0727d","Type":"ContainerStarted","Data":"bfd62d9aaf6cc975b3ba1c6a21f1e9f65eeefda41452b6066500515dc6430129"} Sep 29 23:58:42 crc kubenswrapper[4922]: I0929 23:58:42.363449 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4w9ss" podStartSLOduration=2.877857679 podStartE2EDuration="4.363429502s" podCreationTimestamp="2025-09-29 23:58:38 +0000 UTC" firstStartedPulling="2025-09-29 23:58:40.281183823 +0000 UTC m=+5524.591472676" lastFinishedPulling="2025-09-29 23:58:41.766755646 +0000 UTC m=+5526.077044499" observedRunningTime="2025-09-29 23:58:42.362801357 +0000 UTC m=+5526.673090220" watchObservedRunningTime="2025-09-29 23:58:42.363429502 +0000 UTC m=+5526.673718315" Sep 29 23:58:43 crc kubenswrapper[4922]: I0929 23:58:43.331114 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtpcf" event={"ID":"5685cfce-aa03-472c-bf0c-ff5dd631a559","Type":"ContainerStarted","Data":"4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e"} Sep 29 23:58:43 crc kubenswrapper[4922]: I0929 23:58:43.363285 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rtpcf" podStartSLOduration=2.91852255 podStartE2EDuration="5.363267481s" podCreationTimestamp="2025-09-29 23:58:38 +0000 UTC" firstStartedPulling="2025-09-29 23:58:40.292631035 +0000 UTC m=+5524.602919878" lastFinishedPulling="2025-09-29 23:58:42.737375956 +0000 UTC m=+5527.047664809" observedRunningTime="2025-09-29 23:58:43.35592324 +0000 UTC m=+5527.666212053" watchObservedRunningTime="2025-09-29 23:58:43.363267481 +0000 UTC m=+5527.673556294" Sep 29 23:58:48 crc kubenswrapper[4922]: I0929 23:58:48.687983 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:48 crc kubenswrapper[4922]: I0929 23:58:48.688364 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:48 crc kubenswrapper[4922]: I0929 23:58:48.760443 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:48 crc kubenswrapper[4922]: I0929 23:58:48.827294 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:48 crc kubenswrapper[4922]: I0929 
23:58:48.827509 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:48 crc kubenswrapper[4922]: I0929 23:58:48.909618 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:49 crc kubenswrapper[4922]: I0929 23:58:49.485243 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:49 crc kubenswrapper[4922]: I0929 23:58:49.513787 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:51 crc kubenswrapper[4922]: I0929 23:58:51.553092 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rtpcf"] Sep 29 23:58:51 crc kubenswrapper[4922]: I0929 23:58:51.553522 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rtpcf" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="registry-server" containerID="cri-o://4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e" gracePeriod=2 Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.142327 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.202118 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-utilities\") pod \"5685cfce-aa03-472c-bf0c-ff5dd631a559\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.202167 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-catalog-content\") pod \"5685cfce-aa03-472c-bf0c-ff5dd631a559\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.202244 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldxmr\" (UniqueName: \"kubernetes.io/projected/5685cfce-aa03-472c-bf0c-ff5dd631a559-kube-api-access-ldxmr\") pod \"5685cfce-aa03-472c-bf0c-ff5dd631a559\" (UID: \"5685cfce-aa03-472c-bf0c-ff5dd631a559\") " Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.203219 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-utilities" (OuterVolumeSpecName: "utilities") pod "5685cfce-aa03-472c-bf0c-ff5dd631a559" (UID: "5685cfce-aa03-472c-bf0c-ff5dd631a559"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.213799 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5685cfce-aa03-472c-bf0c-ff5dd631a559-kube-api-access-ldxmr" (OuterVolumeSpecName: "kube-api-access-ldxmr") pod "5685cfce-aa03-472c-bf0c-ff5dd631a559" (UID: "5685cfce-aa03-472c-bf0c-ff5dd631a559"). InnerVolumeSpecName "kube-api-access-ldxmr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.304958 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.305013 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldxmr\" (UniqueName: \"kubernetes.io/projected/5685cfce-aa03-472c-bf0c-ff5dd631a559-kube-api-access-ldxmr\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.422507 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:58:52 crc kubenswrapper[4922]: E0929 23:58:52.423146 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.455342 4922 generic.go:334] "Generic (PLEG): container finished" podID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerID="4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e" exitCode=0 Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.455450 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtpcf" event={"ID":"5685cfce-aa03-472c-bf0c-ff5dd631a559","Type":"ContainerDied","Data":"4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e"} Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.455497 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtpcf" event={"ID":"5685cfce-aa03-472c-bf0c-ff5dd631a559","Type":"ContainerDied","Data":"7cb026995ca0c43684639b3f50473067a329cd50a20b32d09c411b755d27b56e"} Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.455514 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtpcf" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.455530 4922 scope.go:117] "RemoveContainer" containerID="4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.490209 4922 scope.go:117] "RemoveContainer" containerID="3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.524912 4922 scope.go:117] "RemoveContainer" containerID="59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.527114 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5685cfce-aa03-472c-bf0c-ff5dd631a559" (UID: "5685cfce-aa03-472c-bf0c-ff5dd631a559"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.610885 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5685cfce-aa03-472c-bf0c-ff5dd631a559-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.617946 4922 scope.go:117] "RemoveContainer" containerID="4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e" Sep 29 23:58:52 crc kubenswrapper[4922]: E0929 23:58:52.621901 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e\": container with ID starting with 4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e not found: ID does not exist" containerID="4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.621967 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e"} err="failed to get container status \"4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e\": rpc error: code = NotFound desc = could not find container \"4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e\": container with ID starting with 4d389b28e0de691b31f2d351f72315f1ca530d2dcf3d2a86f7b0adcb5316858e not found: ID does not exist" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.622009 4922 scope.go:117] "RemoveContainer" containerID="3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818" Sep 29 23:58:52 crc kubenswrapper[4922]: E0929 23:58:52.622442 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818\": container with ID starting with 3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818 not found: ID does not exist" containerID="3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.622485 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818"} err="failed to get container status \"3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818\": rpc error: code = NotFound desc = could not find container \"3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818\": container with ID starting with 3bcc65a650ec6592cd810f3916996d62770a8676effad0f5bd518837dfe72818 not found: ID does not exist" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.622513 4922 scope.go:117] "RemoveContainer" containerID="59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40" Sep 29 23:58:52 crc kubenswrapper[4922]: E0929 23:58:52.622879 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40\": container with ID starting with 59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40 not found: ID does not exist" containerID="59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.622925 4922 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40"} err="failed to get container status \"59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40\": rpc error: code = NotFound desc = could not find container \"59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40\": container with ID starting with 59d349d625a470bbb264af6c48e67cbb2785bec31afc8503f061212f50e52c40 not found: ID does not exist" Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.818031 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rtpcf"] Sep 29 23:58:52 crc kubenswrapper[4922]: I0929 23:58:52.830572 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rtpcf"] Sep 29 23:58:53 crc kubenswrapper[4922]: I0929 23:58:53.310166 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4w9ss"] Sep 29 23:58:53 crc kubenswrapper[4922]: I0929 23:58:53.310593 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4w9ss" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="registry-server" containerID="cri-o://bfd62d9aaf6cc975b3ba1c6a21f1e9f65eeefda41452b6066500515dc6430129" gracePeriod=2 Sep 29 23:58:53 crc kubenswrapper[4922]: I0929 23:58:53.470477 4922 generic.go:334] "Generic (PLEG): container finished" podID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerID="bfd62d9aaf6cc975b3ba1c6a21f1e9f65eeefda41452b6066500515dc6430129" exitCode=0 Sep 29 23:58:53 crc kubenswrapper[4922]: I0929 23:58:53.470912 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4w9ss" event={"ID":"a8fb4334-30b8-4fae-a170-463ea8c0727d","Type":"ContainerDied","Data":"bfd62d9aaf6cc975b3ba1c6a21f1e9f65eeefda41452b6066500515dc6430129"} Sep 29 23:58:53 crc kubenswrapper[4922]: I0929 23:58:53.877956 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.057786 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x48fh\" (UniqueName: \"kubernetes.io/projected/a8fb4334-30b8-4fae-a170-463ea8c0727d-kube-api-access-x48fh\") pod \"a8fb4334-30b8-4fae-a170-463ea8c0727d\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.057885 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-catalog-content\") pod \"a8fb4334-30b8-4fae-a170-463ea8c0727d\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.057953 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-utilities\") pod \"a8fb4334-30b8-4fae-a170-463ea8c0727d\" (UID: \"a8fb4334-30b8-4fae-a170-463ea8c0727d\") " Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.059671 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-utilities" (OuterVolumeSpecName: "utilities") pod "a8fb4334-30b8-4fae-a170-463ea8c0727d" (UID: "a8fb4334-30b8-4fae-a170-463ea8c0727d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.066696 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8fb4334-30b8-4fae-a170-463ea8c0727d-kube-api-access-x48fh" (OuterVolumeSpecName: "kube-api-access-x48fh") pod "a8fb4334-30b8-4fae-a170-463ea8c0727d" (UID: "a8fb4334-30b8-4fae-a170-463ea8c0727d"). InnerVolumeSpecName "kube-api-access-x48fh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.109576 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8fb4334-30b8-4fae-a170-463ea8c0727d" (UID: "a8fb4334-30b8-4fae-a170-463ea8c0727d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.163158 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.163261 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x48fh\" (UniqueName: \"kubernetes.io/projected/a8fb4334-30b8-4fae-a170-463ea8c0727d-kube-api-access-x48fh\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.163286 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8fb4334-30b8-4fae-a170-463ea8c0727d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.433964 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" path="/var/lib/kubelet/pods/5685cfce-aa03-472c-bf0c-ff5dd631a559/volumes" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.485767 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4w9ss" event={"ID":"a8fb4334-30b8-4fae-a170-463ea8c0727d","Type":"ContainerDied","Data":"1d535240621224fe2f935d19464d8eb16c9985aa26e1a5620223d65aa2bd9dd3"} Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.485847 4922 scope.go:117] "RemoveContainer" containerID="bfd62d9aaf6cc975b3ba1c6a21f1e9f65eeefda41452b6066500515dc6430129" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.486218 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4w9ss" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.516512 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4w9ss"] Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.521969 4922 scope.go:117] "RemoveContainer" containerID="73b4994a0ae7b420263fdc055994f47debe62dd290c18fb43144ffecd9953566" Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.524465 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4w9ss"] Sep 29 23:58:54 crc kubenswrapper[4922]: I0929 23:58:54.540185 4922 scope.go:117] "RemoveContainer" containerID="b214cd2d2834e45a4bfd48fdab338f7686b2ac9e612f30fd43aa404c70f464ea" Sep 29 23:58:56 crc kubenswrapper[4922]: I0929 23:58:56.444258 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" path="/var/lib/kubelet/pods/a8fb4334-30b8-4fae-a170-463ea8c0727d/volumes" Sep 29 23:58:56 crc kubenswrapper[4922]: I0929 23:58:56.809032 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-c95485957-cs59k" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.507650 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-tp6hx"] Sep 29 23:59:04 crc kubenswrapper[4922]: E0929 23:59:04.508577 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="extract-utilities" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508592 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="extract-utilities" Sep 29 23:59:04 crc kubenswrapper[4922]: E0929 
23:59:04.508612 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="extract-utilities" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508619 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="extract-utilities" Sep 29 23:59:04 crc kubenswrapper[4922]: E0929 23:59:04.508644 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="registry-server" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508651 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="registry-server" Sep 29 23:59:04 crc kubenswrapper[4922]: E0929 23:59:04.508661 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="extract-content" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508667 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="extract-content" Sep 29 23:59:04 crc kubenswrapper[4922]: E0929 23:59:04.508675 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="registry-server" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508680 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="registry-server" Sep 29 23:59:04 crc kubenswrapper[4922]: E0929 23:59:04.508690 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="extract-content" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508696 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="extract-content" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508849 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="5685cfce-aa03-472c-bf0c-ff5dd631a559" containerName="registry-server" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.508867 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8fb4334-30b8-4fae-a170-463ea8c0727d" containerName="registry-server" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.509583 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-tp6hx" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.518365 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tp6hx"] Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.691375 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tsm2\" (UniqueName: \"kubernetes.io/projected/784405e9-bb71-4647-9ef8-143eef93e57b-kube-api-access-4tsm2\") pod \"glance-db-create-tp6hx\" (UID: \"784405e9-bb71-4647-9ef8-143eef93e57b\") " pod="openstack/glance-db-create-tp6hx" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.793541 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tsm2\" (UniqueName: \"kubernetes.io/projected/784405e9-bb71-4647-9ef8-143eef93e57b-kube-api-access-4tsm2\") pod \"glance-db-create-tp6hx\" (UID: \"784405e9-bb71-4647-9ef8-143eef93e57b\") " pod="openstack/glance-db-create-tp6hx" Sep 29 23:59:04 crc kubenswrapper[4922]: I0929 23:59:04.827411 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tsm2\" (UniqueName: \"kubernetes.io/projected/784405e9-bb71-4647-9ef8-143eef93e57b-kube-api-access-4tsm2\") pod \"glance-db-create-tp6hx\" (UID: \"784405e9-bb71-4647-9ef8-143eef93e57b\") " pod="openstack/glance-db-create-tp6hx" Sep 29 23:59:05 crc kubenswrapper[4922]: I0929 23:59:05.127619 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tp6hx" Sep 29 23:59:05 crc kubenswrapper[4922]: I0929 23:59:05.422253 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:59:05 crc kubenswrapper[4922]: E0929 23:59:05.422871 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:59:05 crc kubenswrapper[4922]: I0929 23:59:05.620276 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-tp6hx"] Sep 29 23:59:05 crc kubenswrapper[4922]: I0929 23:59:05.662031 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tp6hx" event={"ID":"784405e9-bb71-4647-9ef8-143eef93e57b","Type":"ContainerStarted","Data":"9b513048246445f45d366520b2e70084a4c9c592c3f96c33ba38dba1f1504cf2"} Sep 29 23:59:06 crc kubenswrapper[4922]: I0929 23:59:06.673432 4922 generic.go:334] "Generic (PLEG): container finished" podID="784405e9-bb71-4647-9ef8-143eef93e57b" containerID="215430d776de9256f863e9e8acdb8e99807a899057b0f9a192516d48a098fc45" exitCode=0 Sep 29 23:59:06 crc kubenswrapper[4922]: I0929 23:59:06.673500 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tp6hx" event={"ID":"784405e9-bb71-4647-9ef8-143eef93e57b","Type":"ContainerDied","Data":"215430d776de9256f863e9e8acdb8e99807a899057b0f9a192516d48a098fc45"} Sep 29 23:59:08 crc kubenswrapper[4922]: I0929 23:59:08.099678 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-tp6hx" Sep 29 23:59:08 crc kubenswrapper[4922]: I0929 23:59:08.255592 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tsm2\" (UniqueName: \"kubernetes.io/projected/784405e9-bb71-4647-9ef8-143eef93e57b-kube-api-access-4tsm2\") pod \"784405e9-bb71-4647-9ef8-143eef93e57b\" (UID: \"784405e9-bb71-4647-9ef8-143eef93e57b\") " Sep 29 23:59:08 crc kubenswrapper[4922]: I0929 23:59:08.262199 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/784405e9-bb71-4647-9ef8-143eef93e57b-kube-api-access-4tsm2" (OuterVolumeSpecName: "kube-api-access-4tsm2") pod "784405e9-bb71-4647-9ef8-143eef93e57b" (UID: "784405e9-bb71-4647-9ef8-143eef93e57b"). InnerVolumeSpecName "kube-api-access-4tsm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:08 crc kubenswrapper[4922]: I0929 23:59:08.357325 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tsm2\" (UniqueName: \"kubernetes.io/projected/784405e9-bb71-4647-9ef8-143eef93e57b-kube-api-access-4tsm2\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:08 crc kubenswrapper[4922]: I0929 23:59:08.689697 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-tp6hx" event={"ID":"784405e9-bb71-4647-9ef8-143eef93e57b","Type":"ContainerDied","Data":"9b513048246445f45d366520b2e70084a4c9c592c3f96c33ba38dba1f1504cf2"} Sep 29 23:59:08 crc kubenswrapper[4922]: I0929 23:59:08.689739 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b513048246445f45d366520b2e70084a4c9c592c3f96c33ba38dba1f1504cf2" Sep 29 23:59:08 crc kubenswrapper[4922]: I0929 23:59:08.689751 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-tp6hx" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.591475 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-47b4-account-create-vljxb"] Sep 29 23:59:14 crc kubenswrapper[4922]: E0929 23:59:14.592868 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="784405e9-bb71-4647-9ef8-143eef93e57b" containerName="mariadb-database-create" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.592895 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="784405e9-bb71-4647-9ef8-143eef93e57b" containerName="mariadb-database-create" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.593238 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="784405e9-bb71-4647-9ef8-143eef93e57b" containerName="mariadb-database-create" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.596236 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-47b4-account-create-vljxb" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.602610 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.614552 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-47b4-account-create-vljxb"] Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.687767 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvj4n\" (UniqueName: \"kubernetes.io/projected/992f8cd6-9e7f-49d9-acfc-71e1c077379d-kube-api-access-pvj4n\") pod \"glance-47b4-account-create-vljxb\" (UID: \"992f8cd6-9e7f-49d9-acfc-71e1c077379d\") " pod="openstack/glance-47b4-account-create-vljxb" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.789468 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvj4n\" (UniqueName: \"kubernetes.io/projected/992f8cd6-9e7f-49d9-acfc-71e1c077379d-kube-api-access-pvj4n\") pod \"glance-47b4-account-create-vljxb\" (UID: \"992f8cd6-9e7f-49d9-acfc-71e1c077379d\") " pod="openstack/glance-47b4-account-create-vljxb" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.823146 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvj4n\" (UniqueName: \"kubernetes.io/projected/992f8cd6-9e7f-49d9-acfc-71e1c077379d-kube-api-access-pvj4n\") pod \"glance-47b4-account-create-vljxb\" (UID: \"992f8cd6-9e7f-49d9-acfc-71e1c077379d\") " pod="openstack/glance-47b4-account-create-vljxb" Sep 29 23:59:14 crc kubenswrapper[4922]: I0929 23:59:14.942145 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-47b4-account-create-vljxb" Sep 29 23:59:15 crc kubenswrapper[4922]: I0929 23:59:15.484012 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-47b4-account-create-vljxb"] Sep 29 23:59:15 crc kubenswrapper[4922]: I0929 23:59:15.776011 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-47b4-account-create-vljxb" event={"ID":"992f8cd6-9e7f-49d9-acfc-71e1c077379d","Type":"ContainerStarted","Data":"dc0fe7970669a51deafd842c4d458f5c2264003689bbabad721e175b51fc05c5"} Sep 29 23:59:16 crc kubenswrapper[4922]: I0929 23:59:16.433160 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:59:16 crc kubenswrapper[4922]: E0929 23:59:16.434135 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 29 23:59:16 crc kubenswrapper[4922]: I0929 23:59:16.792038 4922 generic.go:334] "Generic (PLEG): container finished" podID="992f8cd6-9e7f-49d9-acfc-71e1c077379d" containerID="989651c77172e61e6acb59a9eaea0610a044fa55e95d10cb2f672d3388ee6a32" exitCode=0 Sep 29 23:59:16 crc kubenswrapper[4922]: I0929 23:59:16.792079 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-47b4-account-create-vljxb" 
event={"ID":"992f8cd6-9e7f-49d9-acfc-71e1c077379d","Type":"ContainerDied","Data":"989651c77172e61e6acb59a9eaea0610a044fa55e95d10cb2f672d3388ee6a32"} Sep 29 23:59:18 crc kubenswrapper[4922]: I0929 23:59:18.240462 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-47b4-account-create-vljxb" Sep 29 23:59:18 crc kubenswrapper[4922]: I0929 23:59:18.306113 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvj4n\" (UniqueName: \"kubernetes.io/projected/992f8cd6-9e7f-49d9-acfc-71e1c077379d-kube-api-access-pvj4n\") pod \"992f8cd6-9e7f-49d9-acfc-71e1c077379d\" (UID: \"992f8cd6-9e7f-49d9-acfc-71e1c077379d\") " Sep 29 23:59:18 crc kubenswrapper[4922]: I0929 23:59:18.318136 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/992f8cd6-9e7f-49d9-acfc-71e1c077379d-kube-api-access-pvj4n" (OuterVolumeSpecName: "kube-api-access-pvj4n") pod "992f8cd6-9e7f-49d9-acfc-71e1c077379d" (UID: "992f8cd6-9e7f-49d9-acfc-71e1c077379d"). InnerVolumeSpecName "kube-api-access-pvj4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:18 crc kubenswrapper[4922]: I0929 23:59:18.408164 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvj4n\" (UniqueName: \"kubernetes.io/projected/992f8cd6-9e7f-49d9-acfc-71e1c077379d-kube-api-access-pvj4n\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:18 crc kubenswrapper[4922]: I0929 23:59:18.819364 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-47b4-account-create-vljxb" event={"ID":"992f8cd6-9e7f-49d9-acfc-71e1c077379d","Type":"ContainerDied","Data":"dc0fe7970669a51deafd842c4d458f5c2264003689bbabad721e175b51fc05c5"} Sep 29 23:59:18 crc kubenswrapper[4922]: I0929 23:59:18.819807 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc0fe7970669a51deafd842c4d458f5c2264003689bbabad721e175b51fc05c5" Sep 29 23:59:18 crc kubenswrapper[4922]: I0929 23:59:18.819476 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-47b4-account-create-vljxb" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.734067 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-6xwbm"] Sep 29 23:59:19 crc kubenswrapper[4922]: E0929 23:59:19.734523 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="992f8cd6-9e7f-49d9-acfc-71e1c077379d" containerName="mariadb-account-create" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.734539 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="992f8cd6-9e7f-49d9-acfc-71e1c077379d" containerName="mariadb-account-create" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.734827 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="992f8cd6-9e7f-49d9-acfc-71e1c077379d" containerName="mariadb-account-create" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.735492 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.739472 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.739964 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-znlnx" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.759086 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6xwbm"] Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.835244 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glvjq\" (UniqueName: \"kubernetes.io/projected/d2b532ba-4446-49c0-aa22-263f4ddf0a61-kube-api-access-glvjq\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.835310 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-config-data\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.835422 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-combined-ca-bundle\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.835499 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-db-sync-config-data\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.936931 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glvjq\" (UniqueName: \"kubernetes.io/projected/d2b532ba-4446-49c0-aa22-263f4ddf0a61-kube-api-access-glvjq\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.936978 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-config-data\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.937018 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-combined-ca-bundle\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.937049 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-db-sync-config-data\") pod 
\"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.943604 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-combined-ca-bundle\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.955243 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-config-data\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.959165 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glvjq\" (UniqueName: \"kubernetes.io/projected/d2b532ba-4446-49c0-aa22-263f4ddf0a61-kube-api-access-glvjq\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:19 crc kubenswrapper[4922]: I0929 23:59:19.966862 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-db-sync-config-data\") pod \"glance-db-sync-6xwbm\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:20 crc kubenswrapper[4922]: I0929 23:59:20.068452 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:20 crc kubenswrapper[4922]: I0929 23:59:20.687669 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6xwbm"] Sep 29 23:59:20 crc kubenswrapper[4922]: I0929 23:59:20.841311 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6xwbm" event={"ID":"d2b532ba-4446-49c0-aa22-263f4ddf0a61","Type":"ContainerStarted","Data":"b5150b9db6810489e833c8d527a2c9d1df7ddc32cbb8d538137c738e845130f0"} Sep 29 23:59:21 crc kubenswrapper[4922]: I0929 23:59:21.855647 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6xwbm" event={"ID":"d2b532ba-4446-49c0-aa22-263f4ddf0a61","Type":"ContainerStarted","Data":"46507cc70678c5b8aa6449d332718e8f6811a1f165793ef1473e29534f398fcc"} Sep 29 23:59:21 crc kubenswrapper[4922]: I0929 23:59:21.884618 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-6xwbm" podStartSLOduration=2.884594281 podStartE2EDuration="2.884594281s" podCreationTimestamp="2025-09-29 23:59:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:59:21.875932647 +0000 UTC m=+5566.186221480" watchObservedRunningTime="2025-09-29 23:59:21.884594281 +0000 UTC m=+5566.194883114" Sep 29 23:59:24 crc kubenswrapper[4922]: I0929 23:59:24.891111 4922 generic.go:334] "Generic (PLEG): container finished" podID="d2b532ba-4446-49c0-aa22-263f4ddf0a61" containerID="46507cc70678c5b8aa6449d332718e8f6811a1f165793ef1473e29534f398fcc" exitCode=0 Sep 29 23:59:24 crc kubenswrapper[4922]: I0929 23:59:24.891238 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6xwbm" 
event={"ID":"d2b532ba-4446-49c0-aa22-263f4ddf0a61","Type":"ContainerDied","Data":"46507cc70678c5b8aa6449d332718e8f6811a1f165793ef1473e29534f398fcc"} Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.470360 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.583133 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-db-sync-config-data\") pod \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.583214 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-config-data\") pod \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.583266 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-combined-ca-bundle\") pod \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.583379 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glvjq\" (UniqueName: \"kubernetes.io/projected/d2b532ba-4446-49c0-aa22-263f4ddf0a61-kube-api-access-glvjq\") pod \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\" (UID: \"d2b532ba-4446-49c0-aa22-263f4ddf0a61\") " Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.591568 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d2b532ba-4446-49c0-aa22-263f4ddf0a61" (UID: "d2b532ba-4446-49c0-aa22-263f4ddf0a61"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.591943 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2b532ba-4446-49c0-aa22-263f4ddf0a61-kube-api-access-glvjq" (OuterVolumeSpecName: "kube-api-access-glvjq") pod "d2b532ba-4446-49c0-aa22-263f4ddf0a61" (UID: "d2b532ba-4446-49c0-aa22-263f4ddf0a61"). InnerVolumeSpecName "kube-api-access-glvjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.627220 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2b532ba-4446-49c0-aa22-263f4ddf0a61" (UID: "d2b532ba-4446-49c0-aa22-263f4ddf0a61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.678369 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-config-data" (OuterVolumeSpecName: "config-data") pod "d2b532ba-4446-49c0-aa22-263f4ddf0a61" (UID: "d2b532ba-4446-49c0-aa22-263f4ddf0a61"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.686810 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glvjq\" (UniqueName: \"kubernetes.io/projected/d2b532ba-4446-49c0-aa22-263f4ddf0a61-kube-api-access-glvjq\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.686856 4922 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.686871 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.686884 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2b532ba-4446-49c0-aa22-263f4ddf0a61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.920387 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6xwbm" event={"ID":"d2b532ba-4446-49c0-aa22-263f4ddf0a61","Type":"ContainerDied","Data":"b5150b9db6810489e833c8d527a2c9d1df7ddc32cbb8d538137c738e845130f0"} Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.920476 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5150b9db6810489e833c8d527a2c9d1df7ddc32cbb8d538137c738e845130f0" Sep 29 23:59:26 crc kubenswrapper[4922]: I0929 23:59:26.920970 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6xwbm" Sep 29 23:59:27 crc kubenswrapper[4922]: E0929 23:59:27.075296 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2b532ba_4446_49c0_aa22_263f4ddf0a61.slice/crio-b5150b9db6810489e833c8d527a2c9d1df7ddc32cbb8d538137c738e845130f0\": RecentStats: unable to find data in memory cache]" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.305083 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:27 crc kubenswrapper[4922]: E0929 23:59:27.305490 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2b532ba-4446-49c0-aa22-263f4ddf0a61" containerName="glance-db-sync" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.305506 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2b532ba-4446-49c0-aa22-263f4ddf0a61" containerName="glance-db-sync" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.305672 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2b532ba-4446-49c0-aa22-263f4ddf0a61" containerName="glance-db-sync" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.306550 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.308064 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.308666 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.308799 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-znlnx" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.309885 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.327593 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.401760 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-scripts\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.402211 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-config-data\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.402344 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m25c9\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-kube-api-access-m25c9\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.402580 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.402698 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-logs\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.402795 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-ceph\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.402842 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.404658 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c6d4bccbf-f4cdf"] Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.406426 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.418078 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6d4bccbf-f4cdf"] Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.482761 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.484178 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.486001 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.499261 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.504677 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfcw9\" (UniqueName: \"kubernetes.io/projected/8cdf4dda-d85c-4255-a0ff-5d410af37a54-kube-api-access-qfcw9\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.504713 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505016 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-scripts\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505051 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-config-data\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505074 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505106 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpx68\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-kube-api-access-lpx68\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505136 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-logs\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505163 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m25c9\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-kube-api-access-m25c9\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505180 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505199 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505221 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505246 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-config\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505264 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505289 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-logs\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505329 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-ceph\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505348 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.505367 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-ceph\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.506983 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-logs\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.507172 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.507218 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-dns-svc\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.507253 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.513360 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-ceph\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.513831 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-config-data\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.517957 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-scripts\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.518562 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.524199 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m25c9\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-kube-api-access-m25c9\") pod \"glance-default-external-api-0\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609080 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfcw9\" (UniqueName: \"kubernetes.io/projected/8cdf4dda-d85c-4255-a0ff-5d410af37a54-kube-api-access-qfcw9\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609121 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609147 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609162 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpx68\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-kube-api-access-lpx68\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609182 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-logs\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609209 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609228 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609247 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-config\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609310 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-ceph\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609326 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.609344 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-dns-svc\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.610069 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-dns-svc\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.617631 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.618330 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-config\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.621047 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.623888 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-logs\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.624150 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.624704 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.626076 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.628910 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.634932 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-ceph\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.636487 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.659175 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfcw9\" (UniqueName: \"kubernetes.io/projected/8cdf4dda-d85c-4255-a0ff-5d410af37a54-kube-api-access-qfcw9\") pod \"dnsmasq-dns-6c6d4bccbf-f4cdf\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.680039 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpx68\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-kube-api-access-lpx68\") pod \"glance-default-internal-api-0\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.729808 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:27 crc kubenswrapper[4922]: I0929 23:59:27.797787 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.181601 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.234955 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6d4bccbf-f4cdf"] Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.352358 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:28 crc kubenswrapper[4922]: W0929 23:59:28.362668 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21baf463_7fe1_45d3_9e8a_97b7dd2ce051.slice/crio-e5b4f8f25b417b76b32262bb716dc9e1d6baf1a55e173531171af7eb28ea78de WatchSource:0}: Error finding container e5b4f8f25b417b76b32262bb716dc9e1d6baf1a55e173531171af7eb28ea78de: Status 404 returned error can't find the container with id e5b4f8f25b417b76b32262bb716dc9e1d6baf1a55e173531171af7eb28ea78de Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.748658 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.937275 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7","Type":"ContainerStarted","Data":"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb"} Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.937320 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7","Type":"ContainerStarted","Data":"9c2283fc114e96c496093660c2dc9e390521e166fc34f792c2577aa6af4dd809"} Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.942280 4922 generic.go:334] "Generic (PLEG): container finished" podID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerID="05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6" exitCode=0 Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.942363 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" event={"ID":"8cdf4dda-d85c-4255-a0ff-5d410af37a54","Type":"ContainerDied","Data":"05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6"} Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.942425 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" event={"ID":"8cdf4dda-d85c-4255-a0ff-5d410af37a54","Type":"ContainerStarted","Data":"8e6d9c4443b142a5e4c9cc7938ef9b4f61ab327602f350fb8cbf023590b175c4"} Sep 29 23:59:28 crc kubenswrapper[4922]: I0929 23:59:28.944170 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21baf463-7fe1-45d3-9e8a-97b7dd2ce051","Type":"ContainerStarted","Data":"e5b4f8f25b417b76b32262bb716dc9e1d6baf1a55e173531171af7eb28ea78de"} Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.421298 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.960542 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" 
event={"ID":"8cdf4dda-d85c-4255-a0ff-5d410af37a54","Type":"ContainerStarted","Data":"f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e"} Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.961077 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.973914 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"86380e93f24d31c94f945d418afe09f1011e2f6445c628fb457b28215153e610"} Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.978463 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21baf463-7fe1-45d3-9e8a-97b7dd2ce051","Type":"ContainerStarted","Data":"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948"} Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.978508 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21baf463-7fe1-45d3-9e8a-97b7dd2ce051","Type":"ContainerStarted","Data":"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c"} Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.981593 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7","Type":"ContainerStarted","Data":"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2"} Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.981702 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-log" containerID="cri-o://c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb" gracePeriod=30 Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.981959 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-httpd" containerID="cri-o://4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2" gracePeriod=30 Sep 29 23:59:29 crc kubenswrapper[4922]: I0929 23:59:29.998115 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" podStartSLOduration=2.998101127 podStartE2EDuration="2.998101127s" podCreationTimestamp="2025-09-29 23:59:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:59:29.994863077 +0000 UTC m=+5574.305151890" watchObservedRunningTime="2025-09-29 23:59:29.998101127 +0000 UTC m=+5574.308389940" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.030576 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.030552546 podStartE2EDuration="3.030552546s" podCreationTimestamp="2025-09-29 23:59:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:59:30.024835095 +0000 UTC m=+5574.335123948" watchObservedRunningTime="2025-09-29 23:59:30.030552546 +0000 UTC m=+5574.340841369" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.058212 4922 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.058195656 podStartE2EDuration="3.058195656s" podCreationTimestamp="2025-09-29 23:59:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:59:30.055787127 +0000 UTC m=+5574.366075940" watchObservedRunningTime="2025-09-29 23:59:30.058195656 +0000 UTC m=+5574.368484469" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.633691 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.777104 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-httpd-run\") pod \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.777167 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m25c9\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-kube-api-access-m25c9\") pod \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.777264 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-combined-ca-bundle\") pod \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.777289 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-config-data\") pod \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.777305 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-ceph\") pod \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.777340 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-logs\") pod \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.777378 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-scripts\") pod \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\" (UID: \"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7\") " Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.779485 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" (UID: "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.779763 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-logs" (OuterVolumeSpecName: "logs") pod "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" (UID: "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.783990 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-ceph" (OuterVolumeSpecName: "ceph") pod "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" (UID: "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.784049 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-kube-api-access-m25c9" (OuterVolumeSpecName: "kube-api-access-m25c9") pod "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" (UID: "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7"). InnerVolumeSpecName "kube-api-access-m25c9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.787572 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-scripts" (OuterVolumeSpecName: "scripts") pod "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" (UID: "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.813876 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" (UID: "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.855665 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-config-data" (OuterVolumeSpecName: "config-data") pod "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" (UID: "f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.878634 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.878660 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.878670 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.878678 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.878688 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.878698 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:30 crc kubenswrapper[4922]: I0929 23:59:30.878708 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m25c9\" (UniqueName: \"kubernetes.io/projected/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7-kube-api-access-m25c9\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.001845 4922 generic.go:334] "Generic (PLEG): container finished" podID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerID="4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2" exitCode=0 Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.001912 4922 generic.go:334] "Generic (PLEG): container finished" podID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerID="c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb" exitCode=143 Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.001971 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.001975 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7","Type":"ContainerDied","Data":"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2"} Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.002128 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7","Type":"ContainerDied","Data":"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb"} Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.002158 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7","Type":"ContainerDied","Data":"9c2283fc114e96c496093660c2dc9e390521e166fc34f792c2577aa6af4dd809"} Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.002192 4922 scope.go:117] "RemoveContainer" containerID="4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.033984 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.058564 4922 scope.go:117] "RemoveContainer" containerID="c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.059464 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.068166 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.083091 4922 scope.go:117] "RemoveContainer" containerID="4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2" Sep 29 23:59:31 crc kubenswrapper[4922]: E0929 23:59:31.083705 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2\": container with ID starting with 4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2 not found: ID does not exist" containerID="4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.083782 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2"} err="failed to get container status \"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2\": rpc error: code = NotFound desc = could not find container \"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2\": container with ID starting with 4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2 not found: ID does not exist" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.083814 4922 scope.go:117] "RemoveContainer" containerID="c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb" Sep 29 23:59:31 crc kubenswrapper[4922]: E0929 23:59:31.084206 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb\": container with 
ID starting with c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb not found: ID does not exist" containerID="c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.084244 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb"} err="failed to get container status \"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb\": rpc error: code = NotFound desc = could not find container \"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb\": container with ID starting with c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb not found: ID does not exist" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.084273 4922 scope.go:117] "RemoveContainer" containerID="4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.084570 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2"} err="failed to get container status \"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2\": rpc error: code = NotFound desc = could not find container \"4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2\": container with ID starting with 4ecdeac0ed1afdcba02244f897e9dbf57ff27a63a4baa8db92e12da411b016d2 not found: ID does not exist" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.084594 4922 scope.go:117] "RemoveContainer" containerID="c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.084916 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb"} err="failed to get container status \"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb\": rpc error: code = NotFound desc = could not find container \"c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb\": container with ID starting with c2595b64be0d7fe48dd6f75fed3b712e4abb1d3bdf84d8deef88a6e4414e22fb not found: ID does not exist" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.085687 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:31 crc kubenswrapper[4922]: E0929 23:59:31.086062 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-log" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.086083 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-log" Sep 29 23:59:31 crc kubenswrapper[4922]: E0929 23:59:31.086101 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-httpd" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.086110 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-httpd" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.086311 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-log" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.086330 4922 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" containerName="glance-httpd" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.087298 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.089179 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.104062 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.285991 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-logs\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.286088 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-scripts\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.286248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-config-data\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.286430 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-ceph\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.286744 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.286818 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsdqh\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-kube-api-access-wsdqh\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.286854 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.388267 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.388561 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsdqh\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-kube-api-access-wsdqh\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.388582 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.388616 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-logs\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.388659 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-scripts\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.388677 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-config-data\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.388714 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-ceph\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.389261 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-logs\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.389330 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.393004 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-ceph\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.394484 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-config-data\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.394951 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-scripts\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.397642 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.405518 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsdqh\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-kube-api-access-wsdqh\") pod \"glance-default-external-api-0\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " pod="openstack/glance-default-external-api-0" Sep 29 23:59:31 crc kubenswrapper[4922]: I0929 23:59:31.447984 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.004371 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.016216 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-log" containerID="cri-o://69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c" gracePeriod=30 Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.016509 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-httpd" containerID="cri-o://3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948" gracePeriod=30 Sep 29 23:59:32 crc kubenswrapper[4922]: W0929 23:59:32.016724 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2639f2fb_9a5f_4831_9302_3e2d6cd82d84.slice/crio-c41213cf139d2d2a28cfce684711fea3e896333ab852626b70fd4d5f2ebc99de WatchSource:0}: Error finding container c41213cf139d2d2a28cfce684711fea3e896333ab852626b70fd4d5f2ebc99de: Status 404 returned error can't find the container with id c41213cf139d2d2a28cfce684711fea3e896333ab852626b70fd4d5f2ebc99de Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.433910 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7" path="/var/lib/kubelet/pods/f3ee1c8c-76fb-49c3-ba43-19d98a4c7dd7/volumes" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.730031 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.914757 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpx68\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-kube-api-access-lpx68\") pod \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.914827 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-logs\") pod \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.914904 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-ceph\") pod \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.915038 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-combined-ca-bundle\") pod \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.915129 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-httpd-run\") pod \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.915158 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-config-data\") pod \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.915269 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-scripts\") pod \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\" (UID: \"21baf463-7fe1-45d3-9e8a-97b7dd2ce051\") " Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.916924 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "21baf463-7fe1-45d3-9e8a-97b7dd2ce051" (UID: "21baf463-7fe1-45d3-9e8a-97b7dd2ce051"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.918705 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-logs" (OuterVolumeSpecName: "logs") pod "21baf463-7fe1-45d3-9e8a-97b7dd2ce051" (UID: "21baf463-7fe1-45d3-9e8a-97b7dd2ce051"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.919496 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-scripts" (OuterVolumeSpecName: "scripts") pod "21baf463-7fe1-45d3-9e8a-97b7dd2ce051" (UID: "21baf463-7fe1-45d3-9e8a-97b7dd2ce051"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.921459 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-kube-api-access-lpx68" (OuterVolumeSpecName: "kube-api-access-lpx68") pod "21baf463-7fe1-45d3-9e8a-97b7dd2ce051" (UID: "21baf463-7fe1-45d3-9e8a-97b7dd2ce051"). InnerVolumeSpecName "kube-api-access-lpx68". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.925851 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-ceph" (OuterVolumeSpecName: "ceph") pod "21baf463-7fe1-45d3-9e8a-97b7dd2ce051" (UID: "21baf463-7fe1-45d3-9e8a-97b7dd2ce051"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.945759 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21baf463-7fe1-45d3-9e8a-97b7dd2ce051" (UID: "21baf463-7fe1-45d3-9e8a-97b7dd2ce051"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:32 crc kubenswrapper[4922]: I0929 23:59:32.982739 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-config-data" (OuterVolumeSpecName: "config-data") pod "21baf463-7fe1-45d3-9e8a-97b7dd2ce051" (UID: "21baf463-7fe1-45d3-9e8a-97b7dd2ce051"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.017897 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.017995 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.018008 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.018016 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.018024 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpx68\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-kube-api-access-lpx68\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.018033 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-logs\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.018041 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/21baf463-7fe1-45d3-9e8a-97b7dd2ce051-ceph\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.032142 4922 generic.go:334] "Generic (PLEG): container finished" podID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerID="3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948" exitCode=0 Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.032174 4922 generic.go:334] "Generic (PLEG): container finished" podID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerID="69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c" exitCode=143 Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.032207 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21baf463-7fe1-45d3-9e8a-97b7dd2ce051","Type":"ContainerDied","Data":"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948"} Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.032232 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21baf463-7fe1-45d3-9e8a-97b7dd2ce051","Type":"ContainerDied","Data":"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c"} Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.032244 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"21baf463-7fe1-45d3-9e8a-97b7dd2ce051","Type":"ContainerDied","Data":"e5b4f8f25b417b76b32262bb716dc9e1d6baf1a55e173531171af7eb28ea78de"} Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.032260 4922 scope.go:117] "RemoveContainer" containerID="3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 
23:59:33.032349 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.037129 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2639f2fb-9a5f-4831-9302-3e2d6cd82d84","Type":"ContainerStarted","Data":"d0dbe8ea80b2c96687b2aaedecd5b006bb2c4ae1ad09b94440abe47c5645ac3a"} Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.037172 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2639f2fb-9a5f-4831-9302-3e2d6cd82d84","Type":"ContainerStarted","Data":"c41213cf139d2d2a28cfce684711fea3e896333ab852626b70fd4d5f2ebc99de"} Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.081693 4922 scope.go:117] "RemoveContainer" containerID="69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.085123 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.100558 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.116521 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:33 crc kubenswrapper[4922]: E0929 23:59:33.117063 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-httpd" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.117082 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-httpd" Sep 29 23:59:33 crc kubenswrapper[4922]: E0929 23:59:33.117106 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-log" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.117113 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-log" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.117308 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-log" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.117333 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" containerName="glance-httpd" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.118694 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.120709 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.124821 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.129975 4922 scope.go:117] "RemoveContainer" containerID="3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948" Sep 29 23:59:33 crc kubenswrapper[4922]: E0929 23:59:33.131826 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948\": container with ID starting with 3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948 not found: ID does not exist" containerID="3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.131862 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948"} err="failed to get container status \"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948\": rpc error: code = NotFound desc = could not find container \"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948\": container with ID starting with 3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948 not found: ID does not exist" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.131923 4922 scope.go:117] "RemoveContainer" containerID="69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c" Sep 29 23:59:33 crc kubenswrapper[4922]: E0929 23:59:33.132433 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c\": container with ID starting with 69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c not found: ID does not exist" containerID="69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.132455 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c"} err="failed to get container status \"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c\": rpc error: code = NotFound desc = could not find container \"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c\": container with ID starting with 69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c not found: ID does not exist" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.132490 4922 scope.go:117] "RemoveContainer" containerID="3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.132858 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948"} err="failed to get container status \"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948\": rpc error: code = NotFound desc = could not find container \"3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948\": container with ID 
starting with 3ddbd605d121c1cb0295b5b202372895652d34fb1ceac3a7aa27ced3465ef948 not found: ID does not exist" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.132982 4922 scope.go:117] "RemoveContainer" containerID="69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.133408 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c"} err="failed to get container status \"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c\": rpc error: code = NotFound desc = could not find container \"69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c\": container with ID starting with 69557b0311ef83ca875251d5658aaaf7910f4d78f148aff3b8f2a6608749605c not found: ID does not exist" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.220798 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.220921 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.220957 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.221015 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.221178 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lch88\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-kube-api-access-lch88\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.221224 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-ceph\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.221277 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-logs\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.323338 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.323432 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.323499 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.323659 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lch88\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-kube-api-access-lch88\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.323709 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-ceph\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.323763 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-logs\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.323824 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.324672 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.325759 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-logs\") pod \"glance-default-internal-api-0\" (UID: 
\"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.326726 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.329315 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.330075 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-ceph\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.331892 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.338104 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lch88\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-kube-api-access-lch88\") pod \"glance-default-internal-api-0\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " pod="openstack/glance-default-internal-api-0" Sep 29 23:59:33 crc kubenswrapper[4922]: I0929 23:59:33.448113 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:34 crc kubenswrapper[4922]: I0929 23:59:34.056078 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2639f2fb-9a5f-4831-9302-3e2d6cd82d84","Type":"ContainerStarted","Data":"084e370aa0a43bd4d2727702dbb0a175fab65b9cbae2db19fc2567207a88880c"} Sep 29 23:59:34 crc kubenswrapper[4922]: I0929 23:59:34.057946 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 23:59:34 crc kubenswrapper[4922]: I0929 23:59:34.099007 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.098969811 podStartE2EDuration="3.098969811s" podCreationTimestamp="2025-09-29 23:59:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:59:34.077615425 +0000 UTC m=+5578.387904298" watchObservedRunningTime="2025-09-29 23:59:34.098969811 +0000 UTC m=+5578.409258664" Sep 29 23:59:34 crc kubenswrapper[4922]: I0929 23:59:34.435754 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21baf463-7fe1-45d3-9e8a-97b7dd2ce051" path="/var/lib/kubelet/pods/21baf463-7fe1-45d3-9e8a-97b7dd2ce051/volumes" Sep 29 23:59:35 crc kubenswrapper[4922]: I0929 23:59:35.069775 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"96af43a5-5c5a-4d87-9860-dc79a4e0a54a","Type":"ContainerStarted","Data":"ce215a73f76762f9222329879baa9c8056c4ab48717537ec703e51021124c4c1"} Sep 29 23:59:35 crc kubenswrapper[4922]: I0929 23:59:35.070178 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"96af43a5-5c5a-4d87-9860-dc79a4e0a54a","Type":"ContainerStarted","Data":"071b31e9d58eed79c9cd4c6a53bfe45252ace97afa91fbedb505586214821807"} Sep 29 23:59:36 crc kubenswrapper[4922]: I0929 23:59:36.086156 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"96af43a5-5c5a-4d87-9860-dc79a4e0a54a","Type":"ContainerStarted","Data":"a5c4fd02a838b947bdc1c121d04ea83d5bd986c680b033ec708d5f06fe6cac42"} Sep 29 23:59:36 crc kubenswrapper[4922]: I0929 23:59:36.121232 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.121206483 podStartE2EDuration="3.121206483s" podCreationTimestamp="2025-09-29 23:59:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 23:59:36.117542993 +0000 UTC m=+5580.427831836" watchObservedRunningTime="2025-09-29 23:59:36.121206483 +0000 UTC m=+5580.431495326" Sep 29 23:59:37 crc kubenswrapper[4922]: I0929 23:59:37.732688 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 29 23:59:37 crc kubenswrapper[4922]: I0929 23:59:37.826596 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69557444b9-qbrx5"] Sep 29 23:59:37 crc kubenswrapper[4922]: I0929 23:59:37.826890 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" podUID="852f7c8b-f89d-4799-927b-0168b1352db8" containerName="dnsmasq-dns" 
containerID="cri-o://177dd394c33d0a1e1a92c7340672e0cffb044c590405abdba16aafdee8319958" gracePeriod=10 Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.109136 4922 generic.go:334] "Generic (PLEG): container finished" podID="852f7c8b-f89d-4799-927b-0168b1352db8" containerID="177dd394c33d0a1e1a92c7340672e0cffb044c590405abdba16aafdee8319958" exitCode=0 Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.109215 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" event={"ID":"852f7c8b-f89d-4799-927b-0168b1352db8","Type":"ContainerDied","Data":"177dd394c33d0a1e1a92c7340672e0cffb044c590405abdba16aafdee8319958"} Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.353756 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.536090 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-nb\") pod \"852f7c8b-f89d-4799-927b-0168b1352db8\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.536161 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-dns-svc\") pod \"852f7c8b-f89d-4799-927b-0168b1352db8\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.536178 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-sb\") pod \"852f7c8b-f89d-4799-927b-0168b1352db8\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.536225 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25std\" (UniqueName: \"kubernetes.io/projected/852f7c8b-f89d-4799-927b-0168b1352db8-kube-api-access-25std\") pod \"852f7c8b-f89d-4799-927b-0168b1352db8\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.536265 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-config\") pod \"852f7c8b-f89d-4799-927b-0168b1352db8\" (UID: \"852f7c8b-f89d-4799-927b-0168b1352db8\") " Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.543447 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/852f7c8b-f89d-4799-927b-0168b1352db8-kube-api-access-25std" (OuterVolumeSpecName: "kube-api-access-25std") pod "852f7c8b-f89d-4799-927b-0168b1352db8" (UID: "852f7c8b-f89d-4799-927b-0168b1352db8"). InnerVolumeSpecName "kube-api-access-25std". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.593707 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "852f7c8b-f89d-4799-927b-0168b1352db8" (UID: "852f7c8b-f89d-4799-927b-0168b1352db8"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.597569 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "852f7c8b-f89d-4799-927b-0168b1352db8" (UID: "852f7c8b-f89d-4799-927b-0168b1352db8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.600929 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "852f7c8b-f89d-4799-927b-0168b1352db8" (UID: "852f7c8b-f89d-4799-927b-0168b1352db8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.607038 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-config" (OuterVolumeSpecName: "config") pod "852f7c8b-f89d-4799-927b-0168b1352db8" (UID: "852f7c8b-f89d-4799-927b-0168b1352db8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.639048 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25std\" (UniqueName: \"kubernetes.io/projected/852f7c8b-f89d-4799-927b-0168b1352db8-kube-api-access-25std\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.639110 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-config\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.639133 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.639150 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:38 crc kubenswrapper[4922]: I0929 23:59:38.639168 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/852f7c8b-f89d-4799-927b-0168b1352db8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:39 crc kubenswrapper[4922]: I0929 23:59:39.129294 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" event={"ID":"852f7c8b-f89d-4799-927b-0168b1352db8","Type":"ContainerDied","Data":"5f181106b9e7fbed4c87a57e7dd9e8b99b3624c68ec38eea070497659bbd90db"} Sep 29 23:59:39 crc kubenswrapper[4922]: I0929 23:59:39.129853 4922 scope.go:117] "RemoveContainer" containerID="177dd394c33d0a1e1a92c7340672e0cffb044c590405abdba16aafdee8319958" Sep 29 23:59:39 crc kubenswrapper[4922]: I0929 23:59:39.129502 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-69557444b9-qbrx5" Sep 29 23:59:39 crc kubenswrapper[4922]: I0929 23:59:39.171171 4922 scope.go:117] "RemoveContainer" containerID="8040fc5934ead7cb92dc7291c29a76d5267fc54e73b2722e800215ac38ab4554" Sep 29 23:59:39 crc kubenswrapper[4922]: I0929 23:59:39.193410 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-69557444b9-qbrx5"] Sep 29 23:59:39 crc kubenswrapper[4922]: I0929 23:59:39.231228 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-69557444b9-qbrx5"] Sep 29 23:59:40 crc kubenswrapper[4922]: I0929 23:59:40.441209 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="852f7c8b-f89d-4799-927b-0168b1352db8" path="/var/lib/kubelet/pods/852f7c8b-f89d-4799-927b-0168b1352db8/volumes" Sep 29 23:59:41 crc kubenswrapper[4922]: I0929 23:59:41.449043 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 23:59:41 crc kubenswrapper[4922]: I0929 23:59:41.449445 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 23:59:41 crc kubenswrapper[4922]: I0929 23:59:41.490651 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 23:59:41 crc kubenswrapper[4922]: I0929 23:59:41.513780 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 23:59:42 crc kubenswrapper[4922]: I0929 23:59:42.171939 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 23:59:42 crc kubenswrapper[4922]: I0929 23:59:42.172057 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 23:59:43 crc kubenswrapper[4922]: I0929 23:59:43.449353 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:43 crc kubenswrapper[4922]: I0929 23:59:43.450260 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:43 crc kubenswrapper[4922]: I0929 23:59:43.487988 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:43 crc kubenswrapper[4922]: I0929 23:59:43.522464 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:44 crc kubenswrapper[4922]: I0929 23:59:44.092160 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 23:59:44 crc kubenswrapper[4922]: I0929 23:59:44.103085 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 23:59:44 crc kubenswrapper[4922]: I0929 23:59:44.201206 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:44 crc kubenswrapper[4922]: I0929 23:59:44.201241 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:46 crc kubenswrapper[4922]: I0929 23:59:46.041258 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/glance-default-internal-api-0" Sep 29 23:59:46 crc kubenswrapper[4922]: I0929 23:59:46.045878 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 23:59:52 crc kubenswrapper[4922]: I0929 23:59:52.670345 4922 scope.go:117] "RemoveContainer" containerID="468c209441a7e43092fe1f4305e54769bb77e8ce1c01ff5b2f218dd404cb1d12" Sep 29 23:59:54 crc kubenswrapper[4922]: I0929 23:59:54.854026 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-jvq8g"] Sep 29 23:59:54 crc kubenswrapper[4922]: E0929 23:59:54.855120 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="852f7c8b-f89d-4799-927b-0168b1352db8" containerName="init" Sep 29 23:59:54 crc kubenswrapper[4922]: I0929 23:59:54.855163 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="852f7c8b-f89d-4799-927b-0168b1352db8" containerName="init" Sep 29 23:59:54 crc kubenswrapper[4922]: E0929 23:59:54.855212 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="852f7c8b-f89d-4799-927b-0168b1352db8" containerName="dnsmasq-dns" Sep 29 23:59:54 crc kubenswrapper[4922]: I0929 23:59:54.855229 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="852f7c8b-f89d-4799-927b-0168b1352db8" containerName="dnsmasq-dns" Sep 29 23:59:54 crc kubenswrapper[4922]: I0929 23:59:54.855950 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="852f7c8b-f89d-4799-927b-0168b1352db8" containerName="dnsmasq-dns" Sep 29 23:59:54 crc kubenswrapper[4922]: I0929 23:59:54.857281 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jvq8g" Sep 29 23:59:54 crc kubenswrapper[4922]: I0929 23:59:54.862826 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-jvq8g"] Sep 29 23:59:54 crc kubenswrapper[4922]: I0929 23:59:54.958596 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cwpb\" (UniqueName: \"kubernetes.io/projected/fd7ac5d1-b56d-451d-a60f-ef00bc34c49b-kube-api-access-6cwpb\") pod \"placement-db-create-jvq8g\" (UID: \"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b\") " pod="openstack/placement-db-create-jvq8g" Sep 29 23:59:55 crc kubenswrapper[4922]: I0929 23:59:55.060495 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cwpb\" (UniqueName: \"kubernetes.io/projected/fd7ac5d1-b56d-451d-a60f-ef00bc34c49b-kube-api-access-6cwpb\") pod \"placement-db-create-jvq8g\" (UID: \"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b\") " pod="openstack/placement-db-create-jvq8g" Sep 29 23:59:55 crc kubenswrapper[4922]: I0929 23:59:55.083039 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cwpb\" (UniqueName: \"kubernetes.io/projected/fd7ac5d1-b56d-451d-a60f-ef00bc34c49b-kube-api-access-6cwpb\") pod \"placement-db-create-jvq8g\" (UID: \"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b\") " pod="openstack/placement-db-create-jvq8g" Sep 29 23:59:55 crc kubenswrapper[4922]: I0929 23:59:55.195762 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-jvq8g" Sep 29 23:59:55 crc kubenswrapper[4922]: I0929 23:59:55.533174 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-jvq8g"] Sep 29 23:59:56 crc kubenswrapper[4922]: I0929 23:59:56.335926 4922 generic.go:334] "Generic (PLEG): container finished" podID="fd7ac5d1-b56d-451d-a60f-ef00bc34c49b" containerID="3c7820562bc640373de4301ca6d8794484dbdb87f9e588dd4593ee41dd86ad28" exitCode=0 Sep 29 23:59:56 crc kubenswrapper[4922]: I0929 23:59:56.336208 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jvq8g" event={"ID":"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b","Type":"ContainerDied","Data":"3c7820562bc640373de4301ca6d8794484dbdb87f9e588dd4593ee41dd86ad28"} Sep 29 23:59:56 crc kubenswrapper[4922]: I0929 23:59:56.336236 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jvq8g" event={"ID":"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b","Type":"ContainerStarted","Data":"25c085992b7c154d3692cba978411da5c223e617243cca24a539981a340ae1fc"} Sep 29 23:59:57 crc kubenswrapper[4922]: I0929 23:59:57.713394 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-jvq8g" Sep 29 23:59:57 crc kubenswrapper[4922]: I0929 23:59:57.918646 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cwpb\" (UniqueName: \"kubernetes.io/projected/fd7ac5d1-b56d-451d-a60f-ef00bc34c49b-kube-api-access-6cwpb\") pod \"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b\" (UID: \"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b\") " Sep 29 23:59:57 crc kubenswrapper[4922]: I0929 23:59:57.927626 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd7ac5d1-b56d-451d-a60f-ef00bc34c49b-kube-api-access-6cwpb" (OuterVolumeSpecName: "kube-api-access-6cwpb") pod "fd7ac5d1-b56d-451d-a60f-ef00bc34c49b" (UID: "fd7ac5d1-b56d-451d-a60f-ef00bc34c49b"). InnerVolumeSpecName "kube-api-access-6cwpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 23:59:58 crc kubenswrapper[4922]: I0929 23:59:58.020905 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cwpb\" (UniqueName: \"kubernetes.io/projected/fd7ac5d1-b56d-451d-a60f-ef00bc34c49b-kube-api-access-6cwpb\") on node \"crc\" DevicePath \"\"" Sep 29 23:59:58 crc kubenswrapper[4922]: I0929 23:59:58.362989 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-jvq8g" event={"ID":"fd7ac5d1-b56d-451d-a60f-ef00bc34c49b","Type":"ContainerDied","Data":"25c085992b7c154d3692cba978411da5c223e617243cca24a539981a340ae1fc"} Sep 29 23:59:58 crc kubenswrapper[4922]: I0929 23:59:58.363047 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25c085992b7c154d3692cba978411da5c223e617243cca24a539981a340ae1fc" Sep 29 23:59:58 crc kubenswrapper[4922]: I0929 23:59:58.363116 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-jvq8g" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.146248 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29319840-m76j2"] Sep 30 00:00:00 crc kubenswrapper[4922]: E0930 00:00:00.147515 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd7ac5d1-b56d-451d-a60f-ef00bc34c49b" containerName="mariadb-database-create" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.147554 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd7ac5d1-b56d-451d-a60f-ef00bc34c49b" containerName="mariadb-database-create" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.147964 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd7ac5d1-b56d-451d-a60f-ef00bc34c49b" containerName="mariadb-database-create" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.149210 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.151785 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.152222 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.168473 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29319840-m76j2"] Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.247358 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j"] Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.248700 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.251180 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.251594 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.258874 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j"] Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.268984 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-serviceca\") pod \"image-pruner-29319840-m76j2\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.269088 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rcxk\" (UniqueName: \"kubernetes.io/projected/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-kube-api-access-4rcxk\") pod \"image-pruner-29319840-m76j2\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.370612 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rcxk\" (UniqueName: \"kubernetes.io/projected/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-kube-api-access-4rcxk\") pod \"image-pruner-29319840-m76j2\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.370736 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp8mj\" (UniqueName: \"kubernetes.io/projected/90d590f0-fe0e-4c8f-9da4-b842519100d9-kube-api-access-gp8mj\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.370850 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-serviceca\") pod \"image-pruner-29319840-m76j2\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.370887 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90d590f0-fe0e-4c8f-9da4-b842519100d9-secret-volume\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.370921 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90d590f0-fe0e-4c8f-9da4-b842519100d9-config-volume\") pod \"collect-profiles-29319840-hkj8j\" 
(UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.371946 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-serviceca\") pod \"image-pruner-29319840-m76j2\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.393435 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rcxk\" (UniqueName: \"kubernetes.io/projected/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-kube-api-access-4rcxk\") pod \"image-pruner-29319840-m76j2\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.474877 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp8mj\" (UniqueName: \"kubernetes.io/projected/90d590f0-fe0e-4c8f-9da4-b842519100d9-kube-api-access-gp8mj\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.474974 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90d590f0-fe0e-4c8f-9da4-b842519100d9-secret-volume\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.475002 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90d590f0-fe0e-4c8f-9da4-b842519100d9-config-volume\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.476870 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90d590f0-fe0e-4c8f-9da4-b842519100d9-config-volume\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.479069 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90d590f0-fe0e-4c8f-9da4-b842519100d9-secret-volume\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.491981 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp8mj\" (UniqueName: \"kubernetes.io/projected/90d590f0-fe0e-4c8f-9da4-b842519100d9-kube-api-access-gp8mj\") pod \"collect-profiles-29319840-hkj8j\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.521290 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:00 crc kubenswrapper[4922]: I0930 00:00:00.575135 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:00.995365 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29319840-m76j2"] Sep 30 00:00:01 crc kubenswrapper[4922]: W0930 00:00:00.996279 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e8fa5d1_0b6b_4477_8ff3_57f5ada755df.slice/crio-b063350d56f1e0763d85ee42710446019ac7e5d21e4d1ec9dd5aa78a9ed220ee WatchSource:0}: Error finding container b063350d56f1e0763d85ee42710446019ac7e5d21e4d1ec9dd5aa78a9ed220ee: Status 404 returned error can't find the container with id b063350d56f1e0763d85ee42710446019ac7e5d21e4d1ec9dd5aa78a9ed220ee Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:01.068810 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j"] Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:01.423158 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-m76j2" event={"ID":"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df","Type":"ContainerStarted","Data":"6643862119e2dce774044fd8e948b495cdb143447af32df474a09c9a2d3cd5e2"} Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:01.423507 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-m76j2" event={"ID":"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df","Type":"ContainerStarted","Data":"b063350d56f1e0763d85ee42710446019ac7e5d21e4d1ec9dd5aa78a9ed220ee"} Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:01.438111 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" event={"ID":"90d590f0-fe0e-4c8f-9da4-b842519100d9","Type":"ContainerStarted","Data":"bbb4c59fa7b2a46b5117f5be6b2cc3728d94e1e446b3de32b927ecc1ef8104fa"} Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:01.438168 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" event={"ID":"90d590f0-fe0e-4c8f-9da4-b842519100d9","Type":"ContainerStarted","Data":"e58679a42ed4c68d6967e623513cff29749041c5cc89950a0d62f2b8be5e6bba"} Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:01.467964 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29319840-m76j2" podStartSLOduration=1.467942978 podStartE2EDuration="1.467942978s" podCreationTimestamp="2025-09-30 00:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:00:01.445475755 +0000 UTC m=+5605.755764608" watchObservedRunningTime="2025-09-30 00:00:01.467942978 +0000 UTC m=+5605.778231801" Sep 30 00:00:01 crc kubenswrapper[4922]: I0930 00:00:01.579579 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" podStartSLOduration=1.579559135 podStartE2EDuration="1.579559135s" podCreationTimestamp="2025-09-30 00:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:00:01.474869688 +0000 UTC m=+5605.785158491" watchObservedRunningTime="2025-09-30 00:00:01.579559135 +0000 UTC m=+5605.889847948" Sep 30 00:00:02 crc kubenswrapper[4922]: I0930 00:00:02.471679 4922 generic.go:334] "Generic (PLEG): container finished" podID="90d590f0-fe0e-4c8f-9da4-b842519100d9" containerID="bbb4c59fa7b2a46b5117f5be6b2cc3728d94e1e446b3de32b927ecc1ef8104fa" exitCode=0 Sep 30 00:00:02 crc kubenswrapper[4922]: I0930 00:00:02.471763 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" event={"ID":"90d590f0-fe0e-4c8f-9da4-b842519100d9","Type":"ContainerDied","Data":"bbb4c59fa7b2a46b5117f5be6b2cc3728d94e1e446b3de32b927ecc1ef8104fa"} Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.480971 4922 generic.go:334] "Generic (PLEG): container finished" podID="6e8fa5d1-0b6b-4477-8ff3-57f5ada755df" containerID="6643862119e2dce774044fd8e948b495cdb143447af32df474a09c9a2d3cd5e2" exitCode=0 Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.481706 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-m76j2" event={"ID":"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df","Type":"ContainerDied","Data":"6643862119e2dce774044fd8e948b495cdb143447af32df474a09c9a2d3cd5e2"} Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.888424 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.964472 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90d590f0-fe0e-4c8f-9da4-b842519100d9-secret-volume\") pod \"90d590f0-fe0e-4c8f-9da4-b842519100d9\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.964521 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90d590f0-fe0e-4c8f-9da4-b842519100d9-config-volume\") pod \"90d590f0-fe0e-4c8f-9da4-b842519100d9\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.964643 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp8mj\" (UniqueName: \"kubernetes.io/projected/90d590f0-fe0e-4c8f-9da4-b842519100d9-kube-api-access-gp8mj\") pod \"90d590f0-fe0e-4c8f-9da4-b842519100d9\" (UID: \"90d590f0-fe0e-4c8f-9da4-b842519100d9\") " Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.965542 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90d590f0-fe0e-4c8f-9da4-b842519100d9-config-volume" (OuterVolumeSpecName: "config-volume") pod "90d590f0-fe0e-4c8f-9da4-b842519100d9" (UID: "90d590f0-fe0e-4c8f-9da4-b842519100d9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.969599 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90d590f0-fe0e-4c8f-9da4-b842519100d9-kube-api-access-gp8mj" (OuterVolumeSpecName: "kube-api-access-gp8mj") pod "90d590f0-fe0e-4c8f-9da4-b842519100d9" (UID: "90d590f0-fe0e-4c8f-9da4-b842519100d9"). InnerVolumeSpecName "kube-api-access-gp8mj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:00:03 crc kubenswrapper[4922]: I0930 00:00:03.969734 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90d590f0-fe0e-4c8f-9da4-b842519100d9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "90d590f0-fe0e-4c8f-9da4-b842519100d9" (UID: "90d590f0-fe0e-4c8f-9da4-b842519100d9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.066726 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp8mj\" (UniqueName: \"kubernetes.io/projected/90d590f0-fe0e-4c8f-9da4-b842519100d9-kube-api-access-gp8mj\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.066769 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/90d590f0-fe0e-4c8f-9da4-b842519100d9-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.066781 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/90d590f0-fe0e-4c8f-9da4-b842519100d9-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.493498 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.493492 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j" event={"ID":"90d590f0-fe0e-4c8f-9da4-b842519100d9","Type":"ContainerDied","Data":"e58679a42ed4c68d6967e623513cff29749041c5cc89950a0d62f2b8be5e6bba"} Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.493932 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e58679a42ed4c68d6967e623513cff29749041c5cc89950a0d62f2b8be5e6bba" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.554188 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84"] Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.561134 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319795-48l84"] Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.757590 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.781415 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rcxk\" (UniqueName: \"kubernetes.io/projected/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-kube-api-access-4rcxk\") pod \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.781777 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-serviceca\") pod \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\" (UID: \"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df\") " Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.782523 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-serviceca" (OuterVolumeSpecName: "serviceca") pod "6e8fa5d1-0b6b-4477-8ff3-57f5ada755df" (UID: "6e8fa5d1-0b6b-4477-8ff3-57f5ada755df"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.793512 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-kube-api-access-4rcxk" (OuterVolumeSpecName: "kube-api-access-4rcxk") pod "6e8fa5d1-0b6b-4477-8ff3-57f5ada755df" (UID: "6e8fa5d1-0b6b-4477-8ff3-57f5ada755df"). InnerVolumeSpecName "kube-api-access-4rcxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.884278 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rcxk\" (UniqueName: \"kubernetes.io/projected/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-kube-api-access-4rcxk\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.884585 4922 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/6e8fa5d1-0b6b-4477-8ff3-57f5ada755df-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.894121 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7e9b-account-create-hqhgw"] Sep 30 00:00:04 crc kubenswrapper[4922]: E0930 00:00:04.894511 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90d590f0-fe0e-4c8f-9da4-b842519100d9" containerName="collect-profiles" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.894526 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="90d590f0-fe0e-4c8f-9da4-b842519100d9" containerName="collect-profiles" Sep 30 00:00:04 crc kubenswrapper[4922]: E0930 00:00:04.894552 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e8fa5d1-0b6b-4477-8ff3-57f5ada755df" containerName="image-pruner" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.894558 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e8fa5d1-0b6b-4477-8ff3-57f5ada755df" containerName="image-pruner" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.894724 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="90d590f0-fe0e-4c8f-9da4-b842519100d9" containerName="collect-profiles" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.894746 4922 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6e8fa5d1-0b6b-4477-8ff3-57f5ada755df" containerName="image-pruner" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.895381 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7e9b-account-create-hqhgw" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.897709 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.907273 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7e9b-account-create-hqhgw"] Sep 30 00:00:04 crc kubenswrapper[4922]: I0930 00:00:04.986438 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t97wk\" (UniqueName: \"kubernetes.io/projected/dc7f5af7-c789-4911-bf7a-92e6f2aaf128-kube-api-access-t97wk\") pod \"placement-7e9b-account-create-hqhgw\" (UID: \"dc7f5af7-c789-4911-bf7a-92e6f2aaf128\") " pod="openstack/placement-7e9b-account-create-hqhgw" Sep 30 00:00:05 crc kubenswrapper[4922]: I0930 00:00:05.088815 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t97wk\" (UniqueName: \"kubernetes.io/projected/dc7f5af7-c789-4911-bf7a-92e6f2aaf128-kube-api-access-t97wk\") pod \"placement-7e9b-account-create-hqhgw\" (UID: \"dc7f5af7-c789-4911-bf7a-92e6f2aaf128\") " pod="openstack/placement-7e9b-account-create-hqhgw" Sep 30 00:00:05 crc kubenswrapper[4922]: I0930 00:00:05.133319 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t97wk\" (UniqueName: \"kubernetes.io/projected/dc7f5af7-c789-4911-bf7a-92e6f2aaf128-kube-api-access-t97wk\") pod \"placement-7e9b-account-create-hqhgw\" (UID: \"dc7f5af7-c789-4911-bf7a-92e6f2aaf128\") " pod="openstack/placement-7e9b-account-create-hqhgw" Sep 30 00:00:05 crc kubenswrapper[4922]: I0930 00:00:05.212498 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7e9b-account-create-hqhgw" Sep 30 00:00:05 crc kubenswrapper[4922]: I0930 00:00:05.510717 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29319840-m76j2" event={"ID":"6e8fa5d1-0b6b-4477-8ff3-57f5ada755df","Type":"ContainerDied","Data":"b063350d56f1e0763d85ee42710446019ac7e5d21e4d1ec9dd5aa78a9ed220ee"} Sep 30 00:00:05 crc kubenswrapper[4922]: I0930 00:00:05.510956 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b063350d56f1e0763d85ee42710446019ac7e5d21e4d1ec9dd5aa78a9ed220ee" Sep 30 00:00:05 crc kubenswrapper[4922]: I0930 00:00:05.510952 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29319840-m76j2" Sep 30 00:00:05 crc kubenswrapper[4922]: I0930 00:00:05.734787 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7e9b-account-create-hqhgw"] Sep 30 00:00:06 crc kubenswrapper[4922]: I0930 00:00:06.434040 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="831d7d8a-3184-430f-990a-837d8f5437db" path="/var/lib/kubelet/pods/831d7d8a-3184-430f-990a-837d8f5437db/volumes" Sep 30 00:00:06 crc kubenswrapper[4922]: I0930 00:00:06.524576 4922 generic.go:334] "Generic (PLEG): container finished" podID="dc7f5af7-c789-4911-bf7a-92e6f2aaf128" containerID="82d75ed960de3c94560416552d7c60a592c0ade2811cd0906e3a8c81f75f56af" exitCode=0 Sep 30 00:00:06 crc kubenswrapper[4922]: I0930 00:00:06.524645 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7e9b-account-create-hqhgw" event={"ID":"dc7f5af7-c789-4911-bf7a-92e6f2aaf128","Type":"ContainerDied","Data":"82d75ed960de3c94560416552d7c60a592c0ade2811cd0906e3a8c81f75f56af"} Sep 30 00:00:06 crc kubenswrapper[4922]: I0930 00:00:06.524701 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7e9b-account-create-hqhgw" event={"ID":"dc7f5af7-c789-4911-bf7a-92e6f2aaf128","Type":"ContainerStarted","Data":"67fac43a61c56578959df88ebe8a60470c6e799e5231f961e6e8f0da28537771"} Sep 30 00:00:07 crc kubenswrapper[4922]: I0930 00:00:07.879811 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-7e9b-account-create-hqhgw" Sep 30 00:00:07 crc kubenswrapper[4922]: I0930 00:00:07.943011 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t97wk\" (UniqueName: \"kubernetes.io/projected/dc7f5af7-c789-4911-bf7a-92e6f2aaf128-kube-api-access-t97wk\") pod \"dc7f5af7-c789-4911-bf7a-92e6f2aaf128\" (UID: \"dc7f5af7-c789-4911-bf7a-92e6f2aaf128\") " Sep 30 00:00:07 crc kubenswrapper[4922]: I0930 00:00:07.949622 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc7f5af7-c789-4911-bf7a-92e6f2aaf128-kube-api-access-t97wk" (OuterVolumeSpecName: "kube-api-access-t97wk") pod "dc7f5af7-c789-4911-bf7a-92e6f2aaf128" (UID: "dc7f5af7-c789-4911-bf7a-92e6f2aaf128"). InnerVolumeSpecName "kube-api-access-t97wk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:00:08 crc kubenswrapper[4922]: I0930 00:00:08.045626 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t97wk\" (UniqueName: \"kubernetes.io/projected/dc7f5af7-c789-4911-bf7a-92e6f2aaf128-kube-api-access-t97wk\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:08 crc kubenswrapper[4922]: I0930 00:00:08.545542 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7e9b-account-create-hqhgw" event={"ID":"dc7f5af7-c789-4911-bf7a-92e6f2aaf128","Type":"ContainerDied","Data":"67fac43a61c56578959df88ebe8a60470c6e799e5231f961e6e8f0da28537771"} Sep 30 00:00:08 crc kubenswrapper[4922]: I0930 00:00:08.545598 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67fac43a61c56578959df88ebe8a60470c6e799e5231f961e6e8f0da28537771" Sep 30 00:00:08 crc kubenswrapper[4922]: I0930 00:00:08.546029 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7e9b-account-create-hqhgw" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.221331 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9b4888cd9-xfvzx"] Sep 30 00:00:10 crc kubenswrapper[4922]: E0930 00:00:10.222046 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc7f5af7-c789-4911-bf7a-92e6f2aaf128" containerName="mariadb-account-create" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.222061 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc7f5af7-c789-4911-bf7a-92e6f2aaf128" containerName="mariadb-account-create" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.222226 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc7f5af7-c789-4911-bf7a-92e6f2aaf128" containerName="mariadb-account-create" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.232127 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.241279 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9b4888cd9-xfvzx"] Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.284342 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-sb\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.284426 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-config\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.284494 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-dns-svc\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.284556 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5vq9\" (UniqueName: \"kubernetes.io/projected/4a7e5135-ca78-409c-90cc-701121a9a777-kube-api-access-s5vq9\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.284618 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-nb\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.285000 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-jcbvf"] Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.286119 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.288990 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.295219 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-k44gz" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.295465 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.310554 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jcbvf"] Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388282 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-combined-ca-bundle\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388335 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8def651b-de0d-4c92-899a-f0844c10106b-logs\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388380 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-sb\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388408 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-config-data\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388433 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-config\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388463 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-scripts\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388478 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-dns-svc\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388525 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-s5vq9\" (UniqueName: \"kubernetes.io/projected/4a7e5135-ca78-409c-90cc-701121a9a777-kube-api-access-s5vq9\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388568 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4dwm\" (UniqueName: \"kubernetes.io/projected/8def651b-de0d-4c92-899a-f0844c10106b-kube-api-access-b4dwm\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.388591 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-nb\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.389548 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-config\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.389555 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-nb\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.390217 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-dns-svc\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.390219 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-sb\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.414257 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5vq9\" (UniqueName: \"kubernetes.io/projected/4a7e5135-ca78-409c-90cc-701121a9a777-kube-api-access-s5vq9\") pod \"dnsmasq-dns-9b4888cd9-xfvzx\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.489648 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4dwm\" (UniqueName: \"kubernetes.io/projected/8def651b-de0d-4c92-899a-f0844c10106b-kube-api-access-b4dwm\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.489738 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-combined-ca-bundle\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.489779 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8def651b-de0d-4c92-899a-f0844c10106b-logs\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.489819 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-config-data\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.489851 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-scripts\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.490831 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8def651b-de0d-4c92-899a-f0844c10106b-logs\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.508782 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-scripts\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.509827 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-combined-ca-bundle\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.510592 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-config-data\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.516004 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4dwm\" (UniqueName: \"kubernetes.io/projected/8def651b-de0d-4c92-899a-f0844c10106b-kube-api-access-b4dwm\") pod \"placement-db-sync-jcbvf\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.557576 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:10 crc kubenswrapper[4922]: I0930 00:00:10.624741 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.038782 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9b4888cd9-xfvzx"] Sep 30 00:00:11 crc kubenswrapper[4922]: W0930 00:00:11.092207 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8def651b_de0d_4c92_899a_f0844c10106b.slice/crio-93e78c097f0605cf490611d9ef2d11b0bc32dacabab994d30a2c9e06c5157912 WatchSource:0}: Error finding container 93e78c097f0605cf490611d9ef2d11b0bc32dacabab994d30a2c9e06c5157912: Status 404 returned error can't find the container with id 93e78c097f0605cf490611d9ef2d11b0bc32dacabab994d30a2c9e06c5157912 Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.095455 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jcbvf"] Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.586700 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jcbvf" event={"ID":"8def651b-de0d-4c92-899a-f0844c10106b","Type":"ContainerStarted","Data":"f7c76236e3b2fe37d36785d46cc8bfdd5c3a705a43cc0ba924809d220bb606bd"} Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.587003 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jcbvf" event={"ID":"8def651b-de0d-4c92-899a-f0844c10106b","Type":"ContainerStarted","Data":"93e78c097f0605cf490611d9ef2d11b0bc32dacabab994d30a2c9e06c5157912"} Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.590969 4922 generic.go:334] "Generic (PLEG): container finished" podID="4a7e5135-ca78-409c-90cc-701121a9a777" containerID="3f86b17b36135db1ac391c4c27973ef75da08f863366e8cd28f20a8d44244a69" exitCode=0 Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.591022 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" event={"ID":"4a7e5135-ca78-409c-90cc-701121a9a777","Type":"ContainerDied","Data":"3f86b17b36135db1ac391c4c27973ef75da08f863366e8cd28f20a8d44244a69"} Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.591066 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" event={"ID":"4a7e5135-ca78-409c-90cc-701121a9a777","Type":"ContainerStarted","Data":"fdc9b52d3a5387fa6b79c73b4e00c014a01a4dcb7d60e0cd18cef43d3a7e1716"} Sep 30 00:00:11 crc kubenswrapper[4922]: I0930 00:00:11.628793 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-jcbvf" podStartSLOduration=1.628760094 podStartE2EDuration="1.628760094s" podCreationTimestamp="2025-09-30 00:00:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:00:11.611754225 +0000 UTC m=+5615.922043078" watchObservedRunningTime="2025-09-30 00:00:11.628760094 +0000 UTC m=+5615.939048947" Sep 30 00:00:12 crc kubenswrapper[4922]: I0930 00:00:12.607816 4922 generic.go:334] "Generic (PLEG): container finished" podID="8def651b-de0d-4c92-899a-f0844c10106b" containerID="f7c76236e3b2fe37d36785d46cc8bfdd5c3a705a43cc0ba924809d220bb606bd" exitCode=0 Sep 30 00:00:12 crc kubenswrapper[4922]: I0930 00:00:12.608047 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jcbvf" 
event={"ID":"8def651b-de0d-4c92-899a-f0844c10106b","Type":"ContainerDied","Data":"f7c76236e3b2fe37d36785d46cc8bfdd5c3a705a43cc0ba924809d220bb606bd"} Sep 30 00:00:12 crc kubenswrapper[4922]: I0930 00:00:12.620830 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" event={"ID":"4a7e5135-ca78-409c-90cc-701121a9a777","Type":"ContainerStarted","Data":"e46f49827c57bf4b871a31254d4c25941c91991f62f2cc02923230f00f3a58b7"} Sep 30 00:00:12 crc kubenswrapper[4922]: I0930 00:00:12.621523 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:12 crc kubenswrapper[4922]: I0930 00:00:12.670091 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" podStartSLOduration=2.670062883 podStartE2EDuration="2.670062883s" podCreationTimestamp="2025-09-30 00:00:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:00:12.656487249 +0000 UTC m=+5616.966776072" watchObservedRunningTime="2025-09-30 00:00:12.670062883 +0000 UTC m=+5616.980351726" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.032456 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.164604 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-combined-ca-bundle\") pod \"8def651b-de0d-4c92-899a-f0844c10106b\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.164725 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4dwm\" (UniqueName: \"kubernetes.io/projected/8def651b-de0d-4c92-899a-f0844c10106b-kube-api-access-b4dwm\") pod \"8def651b-de0d-4c92-899a-f0844c10106b\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.164808 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8def651b-de0d-4c92-899a-f0844c10106b-logs\") pod \"8def651b-de0d-4c92-899a-f0844c10106b\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.164845 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-config-data\") pod \"8def651b-de0d-4c92-899a-f0844c10106b\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.164894 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-scripts\") pod \"8def651b-de0d-4c92-899a-f0844c10106b\" (UID: \"8def651b-de0d-4c92-899a-f0844c10106b\") " Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.165999 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8def651b-de0d-4c92-899a-f0844c10106b-logs" (OuterVolumeSpecName: "logs") pod "8def651b-de0d-4c92-899a-f0844c10106b" (UID: "8def651b-de0d-4c92-899a-f0844c10106b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.173783 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8def651b-de0d-4c92-899a-f0844c10106b-kube-api-access-b4dwm" (OuterVolumeSpecName: "kube-api-access-b4dwm") pod "8def651b-de0d-4c92-899a-f0844c10106b" (UID: "8def651b-de0d-4c92-899a-f0844c10106b"). InnerVolumeSpecName "kube-api-access-b4dwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.181674 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-scripts" (OuterVolumeSpecName: "scripts") pod "8def651b-de0d-4c92-899a-f0844c10106b" (UID: "8def651b-de0d-4c92-899a-f0844c10106b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.194721 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8def651b-de0d-4c92-899a-f0844c10106b" (UID: "8def651b-de0d-4c92-899a-f0844c10106b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.195506 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-config-data" (OuterVolumeSpecName: "config-data") pod "8def651b-de0d-4c92-899a-f0844c10106b" (UID: "8def651b-de0d-4c92-899a-f0844c10106b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.266410 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.266441 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4dwm\" (UniqueName: \"kubernetes.io/projected/8def651b-de0d-4c92-899a-f0844c10106b-kube-api-access-b4dwm\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.266453 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8def651b-de0d-4c92-899a-f0844c10106b-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.266461 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.266471 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8def651b-de0d-4c92-899a-f0844c10106b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.645791 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jcbvf" event={"ID":"8def651b-de0d-4c92-899a-f0844c10106b","Type":"ContainerDied","Data":"93e78c097f0605cf490611d9ef2d11b0bc32dacabab994d30a2c9e06c5157912"} Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.646197 4922 pod_container_deletor.go:80] "Container not found in 
pod's containers" containerID="93e78c097f0605cf490611d9ef2d11b0bc32dacabab994d30a2c9e06c5157912" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.646046 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jcbvf" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.727752 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-84cc4f499b-klhmc"] Sep 30 00:00:14 crc kubenswrapper[4922]: E0930 00:00:14.728081 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8def651b-de0d-4c92-899a-f0844c10106b" containerName="placement-db-sync" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.728098 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8def651b-de0d-4c92-899a-f0844c10106b" containerName="placement-db-sync" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.728253 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8def651b-de0d-4c92-899a-f0844c10106b" containerName="placement-db-sync" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.729163 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.731563 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-k44gz" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.731771 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.736163 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.743346 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-84cc4f499b-klhmc"] Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.773929 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zssrp\" (UniqueName: \"kubernetes.io/projected/81c37d6d-b586-4719-9030-c718360c46a1-kube-api-access-zssrp\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.774213 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c37d6d-b586-4719-9030-c718360c46a1-logs\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.774920 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-scripts\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.775182 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-config-data\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.775280 4922 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-combined-ca-bundle\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.875903 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-config-data\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.875947 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-combined-ca-bundle\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.875976 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zssrp\" (UniqueName: \"kubernetes.io/projected/81c37d6d-b586-4719-9030-c718360c46a1-kube-api-access-zssrp\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.876007 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c37d6d-b586-4719-9030-c718360c46a1-logs\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.876050 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-scripts\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.877477 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/81c37d6d-b586-4719-9030-c718360c46a1-logs\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.879258 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-scripts\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.880929 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-combined-ca-bundle\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.882982 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/81c37d6d-b586-4719-9030-c718360c46a1-config-data\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:14 crc kubenswrapper[4922]: I0930 00:00:14.898320 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zssrp\" (UniqueName: \"kubernetes.io/projected/81c37d6d-b586-4719-9030-c718360c46a1-kube-api-access-zssrp\") pod \"placement-84cc4f499b-klhmc\" (UID: \"81c37d6d-b586-4719-9030-c718360c46a1\") " pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:15 crc kubenswrapper[4922]: I0930 00:00:15.044190 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:15 crc kubenswrapper[4922]: I0930 00:00:15.539946 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-84cc4f499b-klhmc"] Sep 30 00:00:15 crc kubenswrapper[4922]: I0930 00:00:15.654158 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84cc4f499b-klhmc" event={"ID":"81c37d6d-b586-4719-9030-c718360c46a1","Type":"ContainerStarted","Data":"bbfbda13ff32dbafd6d1eff71fbf4ce0093002b3ce3a3ce89e35e86ceb88c664"} Sep 30 00:00:16 crc kubenswrapper[4922]: I0930 00:00:16.666498 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84cc4f499b-klhmc" event={"ID":"81c37d6d-b586-4719-9030-c718360c46a1","Type":"ContainerStarted","Data":"4518e6d17a40f3a7df2e51f43528902443ac4305e22f902ae69b0aabc8d0e938"} Sep 30 00:00:16 crc kubenswrapper[4922]: I0930 00:00:16.666750 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-84cc4f499b-klhmc" event={"ID":"81c37d6d-b586-4719-9030-c718360c46a1","Type":"ContainerStarted","Data":"5fdc76992bc11a304a4c14c7f507782f84ace654b1d25c5e899e48c62506ee9e"} Sep 30 00:00:16 crc kubenswrapper[4922]: I0930 00:00:16.666908 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:16 crc kubenswrapper[4922]: I0930 00:00:16.666926 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:16 crc kubenswrapper[4922]: I0930 00:00:16.704884 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-84cc4f499b-klhmc" podStartSLOduration=2.704855321 podStartE2EDuration="2.704855321s" podCreationTimestamp="2025-09-30 00:00:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:00:16.701305464 +0000 UTC m=+5621.011594287" watchObservedRunningTime="2025-09-30 00:00:16.704855321 +0000 UTC m=+5621.015144164" Sep 30 00:00:20 crc kubenswrapper[4922]: I0930 00:00:20.560710 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:00:20 crc kubenswrapper[4922]: I0930 00:00:20.631192 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6d4bccbf-f4cdf"] Sep 30 00:00:20 crc kubenswrapper[4922]: I0930 00:00:20.631763 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" podUID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerName="dnsmasq-dns" containerID="cri-o://f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e" gracePeriod=10 Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 
00:00:21.084323 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.103017 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-sb\") pod \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.103105 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-config\") pod \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.103254 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-dns-svc\") pod \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.103289 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-nb\") pod \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.103348 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfcw9\" (UniqueName: \"kubernetes.io/projected/8cdf4dda-d85c-4255-a0ff-5d410af37a54-kube-api-access-qfcw9\") pod \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\" (UID: \"8cdf4dda-d85c-4255-a0ff-5d410af37a54\") " Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.112073 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cdf4dda-d85c-4255-a0ff-5d410af37a54-kube-api-access-qfcw9" (OuterVolumeSpecName: "kube-api-access-qfcw9") pod "8cdf4dda-d85c-4255-a0ff-5d410af37a54" (UID: "8cdf4dda-d85c-4255-a0ff-5d410af37a54"). InnerVolumeSpecName "kube-api-access-qfcw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.151344 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8cdf4dda-d85c-4255-a0ff-5d410af37a54" (UID: "8cdf4dda-d85c-4255-a0ff-5d410af37a54"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.154220 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-config" (OuterVolumeSpecName: "config") pod "8cdf4dda-d85c-4255-a0ff-5d410af37a54" (UID: "8cdf4dda-d85c-4255-a0ff-5d410af37a54"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.167729 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8cdf4dda-d85c-4255-a0ff-5d410af37a54" (UID: "8cdf4dda-d85c-4255-a0ff-5d410af37a54"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.174476 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8cdf4dda-d85c-4255-a0ff-5d410af37a54" (UID: "8cdf4dda-d85c-4255-a0ff-5d410af37a54"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.205024 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.205077 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfcw9\" (UniqueName: \"kubernetes.io/projected/8cdf4dda-d85c-4255-a0ff-5d410af37a54-kube-api-access-qfcw9\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.205088 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.205098 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.205106 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cdf4dda-d85c-4255-a0ff-5d410af37a54-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.749898 4922 generic.go:334] "Generic (PLEG): container finished" podID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerID="f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e" exitCode=0 Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.749991 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" event={"ID":"8cdf4dda-d85c-4255-a0ff-5d410af37a54","Type":"ContainerDied","Data":"f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e"} Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.750035 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" event={"ID":"8cdf4dda-d85c-4255-a0ff-5d410af37a54","Type":"ContainerDied","Data":"8e6d9c4443b142a5e4c9cc7938ef9b4f61ab327602f350fb8cbf023590b175c4"} Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.750062 4922 scope.go:117] "RemoveContainer" containerID="f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.749997 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6d4bccbf-f4cdf" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.813789 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6d4bccbf-f4cdf"] Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.815976 4922 scope.go:117] "RemoveContainer" containerID="05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.826901 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c6d4bccbf-f4cdf"] Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.838737 4922 scope.go:117] "RemoveContainer" containerID="f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e" Sep 30 00:00:21 crc kubenswrapper[4922]: E0930 00:00:21.839239 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e\": container with ID starting with f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e not found: ID does not exist" containerID="f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.839275 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e"} err="failed to get container status \"f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e\": rpc error: code = NotFound desc = could not find container \"f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e\": container with ID starting with f5d55a2dd8071e85ce354b7cff7d83047138987b03519af2963f2bdf1748720e not found: ID does not exist" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.839295 4922 scope.go:117] "RemoveContainer" containerID="05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6" Sep 30 00:00:21 crc kubenswrapper[4922]: E0930 00:00:21.839707 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6\": container with ID starting with 05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6 not found: ID does not exist" containerID="05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6" Sep 30 00:00:21 crc kubenswrapper[4922]: I0930 00:00:21.839733 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6"} err="failed to get container status \"05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6\": rpc error: code = NotFound desc = could not find container \"05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6\": container with ID starting with 05f1478e49d0228b2c58fc9847fd9751e67886e8cb149671d376fb075a26e5e6 not found: ID does not exist" Sep 30 00:00:22 crc kubenswrapper[4922]: I0930 00:00:22.438321 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" path="/var/lib/kubelet/pods/8cdf4dda-d85c-4255-a0ff-5d410af37a54/volumes" Sep 30 00:00:46 crc kubenswrapper[4922]: I0930 00:00:46.034438 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:46 crc kubenswrapper[4922]: I0930 00:00:46.079004 4922 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-84cc4f499b-klhmc" Sep 30 00:00:52 crc kubenswrapper[4922]: I0930 00:00:52.831868 4922 scope.go:117] "RemoveContainer" containerID="ee9b371d6d9a4d3c4432025412c08ef62f58ca0857c33e6f25052f1752ce8b77" Sep 30 00:00:52 crc kubenswrapper[4922]: I0930 00:00:52.860954 4922 scope.go:117] "RemoveContainer" containerID="9ab728a18028a9fa5fa11afe8d3a92d7241ea9d5ba0696fd63bcdf4c26f2ec5a" Sep 30 00:00:52 crc kubenswrapper[4922]: I0930 00:00:52.905735 4922 scope.go:117] "RemoveContainer" containerID="ca2850b54ce7078105fa7ad90b152503fc253a16cc4825958fb7455221ac82bb" Sep 30 00:00:52 crc kubenswrapper[4922]: I0930 00:00:52.955484 4922 scope.go:117] "RemoveContainer" containerID="5bf7451e171a77d23957f30eb754aa56283baa52b88596adbf48cabbe70ef135" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.175518 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319841-ptdh5"] Sep 30 00:01:00 crc kubenswrapper[4922]: E0930 00:01:00.178947 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerName="dnsmasq-dns" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.178977 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerName="dnsmasq-dns" Sep 30 00:01:00 crc kubenswrapper[4922]: E0930 00:01:00.179013 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerName="init" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.179023 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerName="init" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.179292 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cdf4dda-d85c-4255-a0ff-5d410af37a54" containerName="dnsmasq-dns" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.180172 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.190609 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-purge-29319841-nx6vg"] Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.191921 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.194005 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.222257 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319841-ptdh5"] Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.241302 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-purge-29319841-nx6vg"] Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.303286 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-config-data\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.303655 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-purge-config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-db-purge-config-data\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.303766 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-config-data\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.303856 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf2cw\" (UniqueName: \"kubernetes.io/projected/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-kube-api-access-sf2cw\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.304090 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-combined-ca-bundle\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.304209 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-fernet-keys\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.304404 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-combined-ca-bundle\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.304504 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bxr5\" (UniqueName: \"kubernetes.io/projected/b4a4d0c7-0816-4323-938d-ddca4803d4c2-kube-api-access-9bxr5\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.406709 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-combined-ca-bundle\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.406793 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-fernet-keys\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.407051 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-combined-ca-bundle\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.407196 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bxr5\" (UniqueName: \"kubernetes.io/projected/b4a4d0c7-0816-4323-938d-ddca4803d4c2-kube-api-access-9bxr5\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.407289 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-config-data\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.407343 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-purge-config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-db-purge-config-data\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.407381 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-config-data\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.407443 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf2cw\" (UniqueName: \"kubernetes.io/projected/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-kube-api-access-sf2cw\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.413473 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-purge-config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-db-purge-config-data\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.415465 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-combined-ca-bundle\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.416199 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-combined-ca-bundle\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.422078 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-config-data\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.427329 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf2cw\" (UniqueName: \"kubernetes.io/projected/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-kube-api-access-sf2cw\") pod \"glance-db-purge-29319841-nx6vg\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.428021 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-fernet-keys\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.428030 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-config-data\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.428567 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bxr5\" (UniqueName: \"kubernetes.io/projected/b4a4d0c7-0816-4323-938d-ddca4803d4c2-kube-api-access-9bxr5\") pod \"keystone-cron-29319841-ptdh5\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.530958 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:00 crc kubenswrapper[4922]: I0930 00:01:00.550001 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:01 crc kubenswrapper[4922]: I0930 00:01:01.010639 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319841-ptdh5"] Sep 30 00:01:01 crc kubenswrapper[4922]: I0930 00:01:01.067204 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-purge-29319841-nx6vg"] Sep 30 00:01:01 crc kubenswrapper[4922]: W0930 00:01:01.079804 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfa2ec61_bb7e_4444_bd84_9a9a41ba1340.slice/crio-88713a9c4988bf0cdffea0f36c4ef50da82da88994487e87631e53073662453f WatchSource:0}: Error finding container 88713a9c4988bf0cdffea0f36c4ef50da82da88994487e87631e53073662453f: Status 404 returned error can't find the container with id 88713a9c4988bf0cdffea0f36c4ef50da82da88994487e87631e53073662453f Sep 30 00:01:01 crc kubenswrapper[4922]: I0930 00:01:01.203117 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-purge-29319841-nx6vg" event={"ID":"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340","Type":"ContainerStarted","Data":"88713a9c4988bf0cdffea0f36c4ef50da82da88994487e87631e53073662453f"} Sep 30 00:01:01 crc kubenswrapper[4922]: I0930 00:01:01.204969 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319841-ptdh5" event={"ID":"b4a4d0c7-0816-4323-938d-ddca4803d4c2","Type":"ContainerStarted","Data":"9f9b984ddcfe4de99753c09d64706e7aa104a1f076aa1b130f69c6f3b83e4127"} Sep 30 00:01:02 crc kubenswrapper[4922]: I0930 00:01:02.223012 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-purge-29319841-nx6vg" event={"ID":"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340","Type":"ContainerStarted","Data":"14261bb5af49b8ed55dad9d02bb2ea60615f7babb9ff2540eda2060468eb1119"} Sep 30 00:01:02 crc kubenswrapper[4922]: I0930 00:01:02.225614 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319841-ptdh5" event={"ID":"b4a4d0c7-0816-4323-938d-ddca4803d4c2","Type":"ContainerStarted","Data":"a537585adc3a19eb2a541ad844ea24d8b7ded86c23b29ccd2dbf51d09f59f1eb"} Sep 30 00:01:02 crc kubenswrapper[4922]: I0930 00:01:02.254681 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-purge-29319841-nx6vg" podStartSLOduration=2.254666005 podStartE2EDuration="2.254666005s" podCreationTimestamp="2025-09-30 00:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:02.242903824 +0000 UTC m=+5666.553192637" watchObservedRunningTime="2025-09-30 00:01:02.254666005 +0000 UTC m=+5666.564954818" Sep 30 00:01:02 crc kubenswrapper[4922]: I0930 00:01:02.266784 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319841-ptdh5" podStartSLOduration=2.266763043 podStartE2EDuration="2.266763043s" podCreationTimestamp="2025-09-30 00:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:02.264909737 +0000 UTC m=+5666.575198550" watchObservedRunningTime="2025-09-30 00:01:02.266763043 +0000 UTC m=+5666.577051846" Sep 30 00:01:03 crc kubenswrapper[4922]: I0930 00:01:03.245887 4922 generic.go:334] "Generic (PLEG): container finished" podID="bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" 
containerID="14261bb5af49b8ed55dad9d02bb2ea60615f7babb9ff2540eda2060468eb1119" exitCode=0 Sep 30 00:01:03 crc kubenswrapper[4922]: I0930 00:01:03.245930 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-purge-29319841-nx6vg" event={"ID":"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340","Type":"ContainerDied","Data":"14261bb5af49b8ed55dad9d02bb2ea60615f7babb9ff2540eda2060468eb1119"} Sep 30 00:01:03 crc kubenswrapper[4922]: I0930 00:01:03.249510 4922 generic.go:334] "Generic (PLEG): container finished" podID="b4a4d0c7-0816-4323-938d-ddca4803d4c2" containerID="a537585adc3a19eb2a541ad844ea24d8b7ded86c23b29ccd2dbf51d09f59f1eb" exitCode=0 Sep 30 00:01:03 crc kubenswrapper[4922]: I0930 00:01:03.249537 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319841-ptdh5" event={"ID":"b4a4d0c7-0816-4323-938d-ddca4803d4c2","Type":"ContainerDied","Data":"a537585adc3a19eb2a541ad844ea24d8b7ded86c23b29ccd2dbf51d09f59f1eb"} Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.735429 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.742126 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.803771 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-fernet-keys\") pod \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.803891 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bxr5\" (UniqueName: \"kubernetes.io/projected/b4a4d0c7-0816-4323-938d-ddca4803d4c2-kube-api-access-9bxr5\") pod \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.803941 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-config-data\") pod \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.803990 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-config-data\") pod \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.804021 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sf2cw\" (UniqueName: \"kubernetes.io/projected/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-kube-api-access-sf2cw\") pod \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.804091 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-combined-ca-bundle\") pod \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\" (UID: \"b4a4d0c7-0816-4323-938d-ddca4803d4c2\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.804134 4922 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-purge-config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-db-purge-config-data\") pod \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.804206 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-combined-ca-bundle\") pod \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\" (UID: \"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340\") " Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.810304 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b4a4d0c7-0816-4323-938d-ddca4803d4c2" (UID: "b4a4d0c7-0816-4323-938d-ddca4803d4c2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.810787 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-db-purge-config-data" (OuterVolumeSpecName: "db-purge-config-data") pod "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" (UID: "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340"). InnerVolumeSpecName "db-purge-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.811680 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4a4d0c7-0816-4323-938d-ddca4803d4c2-kube-api-access-9bxr5" (OuterVolumeSpecName: "kube-api-access-9bxr5") pod "b4a4d0c7-0816-4323-938d-ddca4803d4c2" (UID: "b4a4d0c7-0816-4323-938d-ddca4803d4c2"). InnerVolumeSpecName "kube-api-access-9bxr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.815011 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-kube-api-access-sf2cw" (OuterVolumeSpecName: "kube-api-access-sf2cw") pod "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" (UID: "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340"). InnerVolumeSpecName "kube-api-access-sf2cw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.841839 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4a4d0c7-0816-4323-938d-ddca4803d4c2" (UID: "b4a4d0c7-0816-4323-938d-ddca4803d4c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.845091 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" (UID: "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.847171 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-config-data" (OuterVolumeSpecName: "config-data") pod "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" (UID: "bfa2ec61-bb7e-4444-bd84-9a9a41ba1340"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.865415 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-config-data" (OuterVolumeSpecName: "config-data") pod "b4a4d0c7-0816-4323-938d-ddca4803d4c2" (UID: "b4a4d0c7-0816-4323-938d-ddca4803d4c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906227 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906259 4922 reconciler_common.go:293] "Volume detached for volume \"db-purge-config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-db-purge-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906272 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906280 4922 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906289 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bxr5\" (UniqueName: \"kubernetes.io/projected/b4a4d0c7-0816-4323-938d-ddca4803d4c2-kube-api-access-9bxr5\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906297 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4a4d0c7-0816-4323-938d-ddca4803d4c2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906305 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:04 crc kubenswrapper[4922]: I0930 00:01:04.906312 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sf2cw\" (UniqueName: \"kubernetes.io/projected/bfa2ec61-bb7e-4444-bd84-9a9a41ba1340-kube-api-access-sf2cw\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:05 crc kubenswrapper[4922]: I0930 00:01:05.271617 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-purge-29319841-nx6vg" Sep 30 00:01:05 crc kubenswrapper[4922]: I0930 00:01:05.271641 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-purge-29319841-nx6vg" event={"ID":"bfa2ec61-bb7e-4444-bd84-9a9a41ba1340","Type":"ContainerDied","Data":"88713a9c4988bf0cdffea0f36c4ef50da82da88994487e87631e53073662453f"} Sep 30 00:01:05 crc kubenswrapper[4922]: I0930 00:01:05.271750 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88713a9c4988bf0cdffea0f36c4ef50da82da88994487e87631e53073662453f" Sep 30 00:01:05 crc kubenswrapper[4922]: I0930 00:01:05.274092 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319841-ptdh5" event={"ID":"b4a4d0c7-0816-4323-938d-ddca4803d4c2","Type":"ContainerDied","Data":"9f9b984ddcfe4de99753c09d64706e7aa104a1f076aa1b130f69c6f3b83e4127"} Sep 30 00:01:05 crc kubenswrapper[4922]: I0930 00:01:05.274283 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f9b984ddcfe4de99753c09d64706e7aa104a1f076aa1b130f69c6f3b83e4127" Sep 30 00:01:05 crc kubenswrapper[4922]: I0930 00:01:05.274220 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319841-ptdh5" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.551941 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-ndjw5"] Sep 30 00:01:10 crc kubenswrapper[4922]: E0930 00:01:10.552990 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4a4d0c7-0816-4323-938d-ddca4803d4c2" containerName="keystone-cron" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.553009 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4a4d0c7-0816-4323-938d-ddca4803d4c2" containerName="keystone-cron" Sep 30 00:01:10 crc kubenswrapper[4922]: E0930 00:01:10.553032 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" containerName="glance-dbpurge" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.553039 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" containerName="glance-dbpurge" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.553255 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfa2ec61-bb7e-4444-bd84-9a9a41ba1340" containerName="glance-dbpurge" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.553274 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4a4d0c7-0816-4323-938d-ddca4803d4c2" containerName="keystone-cron" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.553989 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-ndjw5" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.561369 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-ndjw5"] Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.615954 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twvf8\" (UniqueName: \"kubernetes.io/projected/639a2c8b-1225-4c52-8da9-fd1e6d24f38b-kube-api-access-twvf8\") pod \"nova-api-db-create-ndjw5\" (UID: \"639a2c8b-1225-4c52-8da9-fd1e6d24f38b\") " pod="openstack/nova-api-db-create-ndjw5" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.669527 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-wx9vk"] Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.670943 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wx9vk" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.691848 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wx9vk"] Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.720339 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twvf8\" (UniqueName: \"kubernetes.io/projected/639a2c8b-1225-4c52-8da9-fd1e6d24f38b-kube-api-access-twvf8\") pod \"nova-api-db-create-ndjw5\" (UID: \"639a2c8b-1225-4c52-8da9-fd1e6d24f38b\") " pod="openstack/nova-api-db-create-ndjw5" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.720460 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pzkf\" (UniqueName: \"kubernetes.io/projected/6f158e45-b59f-4f85-95f2-4c9c5a410984-kube-api-access-7pzkf\") pod \"nova-cell0-db-create-wx9vk\" (UID: \"6f158e45-b59f-4f85-95f2-4c9c5a410984\") " pod="openstack/nova-cell0-db-create-wx9vk" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.745657 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twvf8\" (UniqueName: \"kubernetes.io/projected/639a2c8b-1225-4c52-8da9-fd1e6d24f38b-kube-api-access-twvf8\") pod \"nova-api-db-create-ndjw5\" (UID: \"639a2c8b-1225-4c52-8da9-fd1e6d24f38b\") " pod="openstack/nova-api-db-create-ndjw5" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.749803 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-6jbpz"] Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.750910 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-6jbpz" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.767850 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6jbpz"] Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.825973 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pzkf\" (UniqueName: \"kubernetes.io/projected/6f158e45-b59f-4f85-95f2-4c9c5a410984-kube-api-access-7pzkf\") pod \"nova-cell0-db-create-wx9vk\" (UID: \"6f158e45-b59f-4f85-95f2-4c9c5a410984\") " pod="openstack/nova-cell0-db-create-wx9vk" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.826138 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq6j4\" (UniqueName: \"kubernetes.io/projected/197a91a3-1cbf-40c2-bda0-84a389e78366-kube-api-access-kq6j4\") pod \"nova-cell1-db-create-6jbpz\" (UID: \"197a91a3-1cbf-40c2-bda0-84a389e78366\") " pod="openstack/nova-cell1-db-create-6jbpz" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.842009 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pzkf\" (UniqueName: \"kubernetes.io/projected/6f158e45-b59f-4f85-95f2-4c9c5a410984-kube-api-access-7pzkf\") pod \"nova-cell0-db-create-wx9vk\" (UID: \"6f158e45-b59f-4f85-95f2-4c9c5a410984\") " pod="openstack/nova-cell0-db-create-wx9vk" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.927543 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq6j4\" (UniqueName: \"kubernetes.io/projected/197a91a3-1cbf-40c2-bda0-84a389e78366-kube-api-access-kq6j4\") pod \"nova-cell1-db-create-6jbpz\" (UID: \"197a91a3-1cbf-40c2-bda0-84a389e78366\") " pod="openstack/nova-cell1-db-create-6jbpz" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.932197 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-ndjw5" Sep 30 00:01:10 crc kubenswrapper[4922]: I0930 00:01:10.958445 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq6j4\" (UniqueName: \"kubernetes.io/projected/197a91a3-1cbf-40c2-bda0-84a389e78366-kube-api-access-kq6j4\") pod \"nova-cell1-db-create-6jbpz\" (UID: \"197a91a3-1cbf-40c2-bda0-84a389e78366\") " pod="openstack/nova-cell1-db-create-6jbpz" Sep 30 00:01:11 crc kubenswrapper[4922]: I0930 00:01:11.003704 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wx9vk" Sep 30 00:01:11 crc kubenswrapper[4922]: I0930 00:01:11.096247 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-6jbpz" Sep 30 00:01:11 crc kubenswrapper[4922]: I0930 00:01:11.229484 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-ndjw5"] Sep 30 00:01:11 crc kubenswrapper[4922]: I0930 00:01:11.338578 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-ndjw5" event={"ID":"639a2c8b-1225-4c52-8da9-fd1e6d24f38b","Type":"ContainerStarted","Data":"6dfcd4895da8f076c5b95fac24703e136f1eb904c4a368ff7614a1aa5b75bd0e"} Sep 30 00:01:11 crc kubenswrapper[4922]: I0930 00:01:11.407550 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-6jbpz"] Sep 30 00:01:11 crc kubenswrapper[4922]: W0930 00:01:11.409265 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod197a91a3_1cbf_40c2_bda0_84a389e78366.slice/crio-7ce3d81d8ad882b4c1e8e27374e736e7a460b97c60cd99d3461bb65da68f80b9 WatchSource:0}: Error finding container 7ce3d81d8ad882b4c1e8e27374e736e7a460b97c60cd99d3461bb65da68f80b9: Status 404 returned error can't find the container with id 7ce3d81d8ad882b4c1e8e27374e736e7a460b97c60cd99d3461bb65da68f80b9 Sep 30 00:01:11 crc kubenswrapper[4922]: I0930 00:01:11.508939 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-wx9vk"] Sep 30 00:01:11 crc kubenswrapper[4922]: W0930 00:01:11.516883 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f158e45_b59f_4f85_95f2_4c9c5a410984.slice/crio-7d9836489ecff638bf9572e7a5a75ae3a0e1dee7bf95d8ac8e6c003e085a8622 WatchSource:0}: Error finding container 7d9836489ecff638bf9572e7a5a75ae3a0e1dee7bf95d8ac8e6c003e085a8622: Status 404 returned error can't find the container with id 7d9836489ecff638bf9572e7a5a75ae3a0e1dee7bf95d8ac8e6c003e085a8622 Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.353974 4922 generic.go:334] "Generic (PLEG): container finished" podID="639a2c8b-1225-4c52-8da9-fd1e6d24f38b" containerID="4c5fa365c41225c547f67b2374dc1355fcbceacd408cde37d1f2951e28a55885" exitCode=0 Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.356591 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-ndjw5" event={"ID":"639a2c8b-1225-4c52-8da9-fd1e6d24f38b","Type":"ContainerDied","Data":"4c5fa365c41225c547f67b2374dc1355fcbceacd408cde37d1f2951e28a55885"} Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.361610 4922 generic.go:334] "Generic (PLEG): container finished" podID="197a91a3-1cbf-40c2-bda0-84a389e78366" containerID="42d181379f893cab83346a862727ae49b5b1c680a1b745d334542fe7e72dcd19" exitCode=0 Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.361714 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6jbpz" event={"ID":"197a91a3-1cbf-40c2-bda0-84a389e78366","Type":"ContainerDied","Data":"42d181379f893cab83346a862727ae49b5b1c680a1b745d334542fe7e72dcd19"} Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.361758 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6jbpz" event={"ID":"197a91a3-1cbf-40c2-bda0-84a389e78366","Type":"ContainerStarted","Data":"7ce3d81d8ad882b4c1e8e27374e736e7a460b97c60cd99d3461bb65da68f80b9"} Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.365104 4922 generic.go:334] "Generic (PLEG): container finished" podID="6f158e45-b59f-4f85-95f2-4c9c5a410984" 
containerID="742fc4c33e22984c7512b89da2bcab454e1e64bf4a7ec3206a6c14b680c243dd" exitCode=0 Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.365141 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wx9vk" event={"ID":"6f158e45-b59f-4f85-95f2-4c9c5a410984","Type":"ContainerDied","Data":"742fc4c33e22984c7512b89da2bcab454e1e64bf4a7ec3206a6c14b680c243dd"} Sep 30 00:01:12 crc kubenswrapper[4922]: I0930 00:01:12.365161 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wx9vk" event={"ID":"6f158e45-b59f-4f85-95f2-4c9c5a410984","Type":"ContainerStarted","Data":"7d9836489ecff638bf9572e7a5a75ae3a0e1dee7bf95d8ac8e6c003e085a8622"} Sep 30 00:01:13 crc kubenswrapper[4922]: I0930 00:01:13.894608 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-ndjw5" Sep 30 00:01:13 crc kubenswrapper[4922]: I0930 00:01:13.901964 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-6jbpz" Sep 30 00:01:13 crc kubenswrapper[4922]: I0930 00:01:13.908426 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wx9vk" Sep 30 00:01:13 crc kubenswrapper[4922]: I0930 00:01:13.991799 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pzkf\" (UniqueName: \"kubernetes.io/projected/6f158e45-b59f-4f85-95f2-4c9c5a410984-kube-api-access-7pzkf\") pod \"6f158e45-b59f-4f85-95f2-4c9c5a410984\" (UID: \"6f158e45-b59f-4f85-95f2-4c9c5a410984\") " Sep 30 00:01:13 crc kubenswrapper[4922]: I0930 00:01:13.992233 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq6j4\" (UniqueName: \"kubernetes.io/projected/197a91a3-1cbf-40c2-bda0-84a389e78366-kube-api-access-kq6j4\") pod \"197a91a3-1cbf-40c2-bda0-84a389e78366\" (UID: \"197a91a3-1cbf-40c2-bda0-84a389e78366\") " Sep 30 00:01:13 crc kubenswrapper[4922]: I0930 00:01:13.992297 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twvf8\" (UniqueName: \"kubernetes.io/projected/639a2c8b-1225-4c52-8da9-fd1e6d24f38b-kube-api-access-twvf8\") pod \"639a2c8b-1225-4c52-8da9-fd1e6d24f38b\" (UID: \"639a2c8b-1225-4c52-8da9-fd1e6d24f38b\") " Sep 30 00:01:13 crc kubenswrapper[4922]: I0930 00:01:13.999781 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/639a2c8b-1225-4c52-8da9-fd1e6d24f38b-kube-api-access-twvf8" (OuterVolumeSpecName: "kube-api-access-twvf8") pod "639a2c8b-1225-4c52-8da9-fd1e6d24f38b" (UID: "639a2c8b-1225-4c52-8da9-fd1e6d24f38b"). InnerVolumeSpecName "kube-api-access-twvf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.002454 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/197a91a3-1cbf-40c2-bda0-84a389e78366-kube-api-access-kq6j4" (OuterVolumeSpecName: "kube-api-access-kq6j4") pod "197a91a3-1cbf-40c2-bda0-84a389e78366" (UID: "197a91a3-1cbf-40c2-bda0-84a389e78366"). InnerVolumeSpecName "kube-api-access-kq6j4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.004590 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f158e45-b59f-4f85-95f2-4c9c5a410984-kube-api-access-7pzkf" (OuterVolumeSpecName: "kube-api-access-7pzkf") pod "6f158e45-b59f-4f85-95f2-4c9c5a410984" (UID: "6f158e45-b59f-4f85-95f2-4c9c5a410984"). InnerVolumeSpecName "kube-api-access-7pzkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.095582 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq6j4\" (UniqueName: \"kubernetes.io/projected/197a91a3-1cbf-40c2-bda0-84a389e78366-kube-api-access-kq6j4\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.095957 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twvf8\" (UniqueName: \"kubernetes.io/projected/639a2c8b-1225-4c52-8da9-fd1e6d24f38b-kube-api-access-twvf8\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.095978 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pzkf\" (UniqueName: \"kubernetes.io/projected/6f158e45-b59f-4f85-95f2-4c9c5a410984-kube-api-access-7pzkf\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.389960 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-wx9vk" event={"ID":"6f158e45-b59f-4f85-95f2-4c9c5a410984","Type":"ContainerDied","Data":"7d9836489ecff638bf9572e7a5a75ae3a0e1dee7bf95d8ac8e6c003e085a8622"} Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.390006 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d9836489ecff638bf9572e7a5a75ae3a0e1dee7bf95d8ac8e6c003e085a8622" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.390093 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-wx9vk" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.392545 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-ndjw5" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.392585 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-ndjw5" event={"ID":"639a2c8b-1225-4c52-8da9-fd1e6d24f38b","Type":"ContainerDied","Data":"6dfcd4895da8f076c5b95fac24703e136f1eb904c4a368ff7614a1aa5b75bd0e"} Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.392632 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dfcd4895da8f076c5b95fac24703e136f1eb904c4a368ff7614a1aa5b75bd0e" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.395182 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-6jbpz" event={"ID":"197a91a3-1cbf-40c2-bda0-84a389e78366","Type":"ContainerDied","Data":"7ce3d81d8ad882b4c1e8e27374e736e7a460b97c60cd99d3461bb65da68f80b9"} Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.395224 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ce3d81d8ad882b4c1e8e27374e736e7a460b97c60cd99d3461bb65da68f80b9" Sep 30 00:01:14 crc kubenswrapper[4922]: I0930 00:01:14.395288 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-6jbpz" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.853176 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-a486-account-create-m77qk"] Sep 30 00:01:20 crc kubenswrapper[4922]: E0930 00:01:20.855677 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="639a2c8b-1225-4c52-8da9-fd1e6d24f38b" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.855695 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="639a2c8b-1225-4c52-8da9-fd1e6d24f38b" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: E0930 00:01:20.855713 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f158e45-b59f-4f85-95f2-4c9c5a410984" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.855719 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f158e45-b59f-4f85-95f2-4c9c5a410984" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: E0930 00:01:20.855765 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="197a91a3-1cbf-40c2-bda0-84a389e78366" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.855773 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="197a91a3-1cbf-40c2-bda0-84a389e78366" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.856027 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f158e45-b59f-4f85-95f2-4c9c5a410984" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.856052 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="197a91a3-1cbf-40c2-bda0-84a389e78366" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.856066 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="639a2c8b-1225-4c52-8da9-fd1e6d24f38b" containerName="mariadb-database-create" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.857125 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-a486-account-create-m77qk" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.874236 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.898138 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a486-account-create-m77qk"] Sep 30 00:01:20 crc kubenswrapper[4922]: I0930 00:01:20.953949 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqpkn\" (UniqueName: \"kubernetes.io/projected/25d16583-e8ae-480d-8814-35e0051537f7-kube-api-access-kqpkn\") pod \"nova-api-a486-account-create-m77qk\" (UID: \"25d16583-e8ae-480d-8814-35e0051537f7\") " pod="openstack/nova-api-a486-account-create-m77qk" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.056872 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqpkn\" (UniqueName: \"kubernetes.io/projected/25d16583-e8ae-480d-8814-35e0051537f7-kube-api-access-kqpkn\") pod \"nova-api-a486-account-create-m77qk\" (UID: \"25d16583-e8ae-480d-8814-35e0051537f7\") " pod="openstack/nova-api-a486-account-create-m77qk" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.068962 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-534a-account-create-mntx5"] Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.070956 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-534a-account-create-mntx5" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.073789 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.083639 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-534a-account-create-mntx5"] Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.100690 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqpkn\" (UniqueName: \"kubernetes.io/projected/25d16583-e8ae-480d-8814-35e0051537f7-kube-api-access-kqpkn\") pod \"nova-api-a486-account-create-m77qk\" (UID: \"25d16583-e8ae-480d-8814-35e0051537f7\") " pod="openstack/nova-api-a486-account-create-m77qk" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.157590 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-5829-account-create-86h9c"] Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.159530 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-5829-account-create-86h9c" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.160682 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95slh\" (UniqueName: \"kubernetes.io/projected/9112b115-b748-4278-8a10-01ccbadf6c77-kube-api-access-95slh\") pod \"nova-cell0-534a-account-create-mntx5\" (UID: \"9112b115-b748-4278-8a10-01ccbadf6c77\") " pod="openstack/nova-cell0-534a-account-create-mntx5" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.161803 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.162930 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5829-account-create-86h9c"] Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.199343 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a486-account-create-m77qk" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.262751 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95slh\" (UniqueName: \"kubernetes.io/projected/9112b115-b748-4278-8a10-01ccbadf6c77-kube-api-access-95slh\") pod \"nova-cell0-534a-account-create-mntx5\" (UID: \"9112b115-b748-4278-8a10-01ccbadf6c77\") " pod="openstack/nova-cell0-534a-account-create-mntx5" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.262795 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wnkj\" (UniqueName: \"kubernetes.io/projected/d1d4f130-a86e-4fca-aa7b-79013b3b18a0-kube-api-access-9wnkj\") pod \"nova-cell1-5829-account-create-86h9c\" (UID: \"d1d4f130-a86e-4fca-aa7b-79013b3b18a0\") " pod="openstack/nova-cell1-5829-account-create-86h9c" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.279266 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95slh\" (UniqueName: \"kubernetes.io/projected/9112b115-b748-4278-8a10-01ccbadf6c77-kube-api-access-95slh\") pod \"nova-cell0-534a-account-create-mntx5\" (UID: \"9112b115-b748-4278-8a10-01ccbadf6c77\") " pod="openstack/nova-cell0-534a-account-create-mntx5" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.365146 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wnkj\" (UniqueName: \"kubernetes.io/projected/d1d4f130-a86e-4fca-aa7b-79013b3b18a0-kube-api-access-9wnkj\") pod \"nova-cell1-5829-account-create-86h9c\" (UID: \"d1d4f130-a86e-4fca-aa7b-79013b3b18a0\") " pod="openstack/nova-cell1-5829-account-create-86h9c" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.386138 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wnkj\" (UniqueName: \"kubernetes.io/projected/d1d4f130-a86e-4fca-aa7b-79013b3b18a0-kube-api-access-9wnkj\") pod \"nova-cell1-5829-account-create-86h9c\" (UID: \"d1d4f130-a86e-4fca-aa7b-79013b3b18a0\") " pod="openstack/nova-cell1-5829-account-create-86h9c" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.400265 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-534a-account-create-mntx5" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.485252 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-5829-account-create-86h9c" Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.644832 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-a486-account-create-m77qk"] Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.874944 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-534a-account-create-mntx5"] Sep 30 00:01:21 crc kubenswrapper[4922]: W0930 00:01:21.885060 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9112b115_b748_4278_8a10_01ccbadf6c77.slice/crio-26c3386199b639df6ddda9c225fb03cfe7c3dae341ed640447383bb830c42171 WatchSource:0}: Error finding container 26c3386199b639df6ddda9c225fb03cfe7c3dae341ed640447383bb830c42171: Status 404 returned error can't find the container with id 26c3386199b639df6ddda9c225fb03cfe7c3dae341ed640447383bb830c42171 Sep 30 00:01:21 crc kubenswrapper[4922]: I0930 00:01:21.953779 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-5829-account-create-86h9c"] Sep 30 00:01:21 crc kubenswrapper[4922]: W0930 00:01:21.961522 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1d4f130_a86e_4fca_aa7b_79013b3b18a0.slice/crio-703b296c9c9f8dba403f397a78626d5531cc4932ed22d2b3487f534dffd8a81b WatchSource:0}: Error finding container 703b296c9c9f8dba403f397a78626d5531cc4932ed22d2b3487f534dffd8a81b: Status 404 returned error can't find the container with id 703b296c9c9f8dba403f397a78626d5531cc4932ed22d2b3487f534dffd8a81b Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.486882 4922 generic.go:334] "Generic (PLEG): container finished" podID="25d16583-e8ae-480d-8814-35e0051537f7" containerID="9a7e3e16a8f22de1aca49d40241029e58abe48de5e00bd748ffa2d2e8dcb6333" exitCode=0 Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.486973 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a486-account-create-m77qk" event={"ID":"25d16583-e8ae-480d-8814-35e0051537f7","Type":"ContainerDied","Data":"9a7e3e16a8f22de1aca49d40241029e58abe48de5e00bd748ffa2d2e8dcb6333"} Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.487001 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a486-account-create-m77qk" event={"ID":"25d16583-e8ae-480d-8814-35e0051537f7","Type":"ContainerStarted","Data":"712fe91ca41be1b1fee61ffdfc35cc809909e3e419cc34cc37b2af99d38e4cd4"} Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.489587 4922 generic.go:334] "Generic (PLEG): container finished" podID="9112b115-b748-4278-8a10-01ccbadf6c77" containerID="109a6286f8aedb4b5bcbf1b2b428cba1e3d9c15de19055ca0934a454b14a9783" exitCode=0 Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.489664 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-534a-account-create-mntx5" event={"ID":"9112b115-b748-4278-8a10-01ccbadf6c77","Type":"ContainerDied","Data":"109a6286f8aedb4b5bcbf1b2b428cba1e3d9c15de19055ca0934a454b14a9783"} Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.489699 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-534a-account-create-mntx5" event={"ID":"9112b115-b748-4278-8a10-01ccbadf6c77","Type":"ContainerStarted","Data":"26c3386199b639df6ddda9c225fb03cfe7c3dae341ed640447383bb830c42171"} Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.491814 4922 generic.go:334] 
"Generic (PLEG): container finished" podID="d1d4f130-a86e-4fca-aa7b-79013b3b18a0" containerID="ad5d44c5e60f154a395c690d22daeffb0459ce8e058e9b20360b64975c9077b6" exitCode=0 Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.491849 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5829-account-create-86h9c" event={"ID":"d1d4f130-a86e-4fca-aa7b-79013b3b18a0","Type":"ContainerDied","Data":"ad5d44c5e60f154a395c690d22daeffb0459ce8e058e9b20360b64975c9077b6"} Sep 30 00:01:22 crc kubenswrapper[4922]: I0930 00:01:22.491867 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5829-account-create-86h9c" event={"ID":"d1d4f130-a86e-4fca-aa7b-79013b3b18a0","Type":"ContainerStarted","Data":"703b296c9c9f8dba403f397a78626d5531cc4932ed22d2b3487f534dffd8a81b"} Sep 30 00:01:23 crc kubenswrapper[4922]: I0930 00:01:23.995422 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-a486-account-create-m77qk" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.003298 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-534a-account-create-mntx5" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.011721 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5829-account-create-86h9c" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.127410 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wnkj\" (UniqueName: \"kubernetes.io/projected/d1d4f130-a86e-4fca-aa7b-79013b3b18a0-kube-api-access-9wnkj\") pod \"d1d4f130-a86e-4fca-aa7b-79013b3b18a0\" (UID: \"d1d4f130-a86e-4fca-aa7b-79013b3b18a0\") " Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.127698 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95slh\" (UniqueName: \"kubernetes.io/projected/9112b115-b748-4278-8a10-01ccbadf6c77-kube-api-access-95slh\") pod \"9112b115-b748-4278-8a10-01ccbadf6c77\" (UID: \"9112b115-b748-4278-8a10-01ccbadf6c77\") " Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.127898 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqpkn\" (UniqueName: \"kubernetes.io/projected/25d16583-e8ae-480d-8814-35e0051537f7-kube-api-access-kqpkn\") pod \"25d16583-e8ae-480d-8814-35e0051537f7\" (UID: \"25d16583-e8ae-480d-8814-35e0051537f7\") " Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.133136 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1d4f130-a86e-4fca-aa7b-79013b3b18a0-kube-api-access-9wnkj" (OuterVolumeSpecName: "kube-api-access-9wnkj") pod "d1d4f130-a86e-4fca-aa7b-79013b3b18a0" (UID: "d1d4f130-a86e-4fca-aa7b-79013b3b18a0"). InnerVolumeSpecName "kube-api-access-9wnkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.133890 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9112b115-b748-4278-8a10-01ccbadf6c77-kube-api-access-95slh" (OuterVolumeSpecName: "kube-api-access-95slh") pod "9112b115-b748-4278-8a10-01ccbadf6c77" (UID: "9112b115-b748-4278-8a10-01ccbadf6c77"). InnerVolumeSpecName "kube-api-access-95slh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.134574 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25d16583-e8ae-480d-8814-35e0051537f7-kube-api-access-kqpkn" (OuterVolumeSpecName: "kube-api-access-kqpkn") pod "25d16583-e8ae-480d-8814-35e0051537f7" (UID: "25d16583-e8ae-480d-8814-35e0051537f7"). InnerVolumeSpecName "kube-api-access-kqpkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.230948 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wnkj\" (UniqueName: \"kubernetes.io/projected/d1d4f130-a86e-4fca-aa7b-79013b3b18a0-kube-api-access-9wnkj\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.231304 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95slh\" (UniqueName: \"kubernetes.io/projected/9112b115-b748-4278-8a10-01ccbadf6c77-kube-api-access-95slh\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.231322 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqpkn\" (UniqueName: \"kubernetes.io/projected/25d16583-e8ae-480d-8814-35e0051537f7-kube-api-access-kqpkn\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.531712 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-534a-account-create-mntx5" event={"ID":"9112b115-b748-4278-8a10-01ccbadf6c77","Type":"ContainerDied","Data":"26c3386199b639df6ddda9c225fb03cfe7c3dae341ed640447383bb830c42171"} Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.531758 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-534a-account-create-mntx5" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.531783 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26c3386199b639df6ddda9c225fb03cfe7c3dae341ed640447383bb830c42171" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.533575 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-5829-account-create-86h9c" event={"ID":"d1d4f130-a86e-4fca-aa7b-79013b3b18a0","Type":"ContainerDied","Data":"703b296c9c9f8dba403f397a78626d5531cc4932ed22d2b3487f534dffd8a81b"} Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.533613 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="703b296c9c9f8dba403f397a78626d5531cc4932ed22d2b3487f534dffd8a81b" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.533631 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-5829-account-create-86h9c" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.536133 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-a486-account-create-m77qk" event={"ID":"25d16583-e8ae-480d-8814-35e0051537f7","Type":"ContainerDied","Data":"712fe91ca41be1b1fee61ffdfc35cc809909e3e419cc34cc37b2af99d38e4cd4"} Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.536170 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="712fe91ca41be1b1fee61ffdfc35cc809909e3e419cc34cc37b2af99d38e4cd4" Sep 30 00:01:24 crc kubenswrapper[4922]: I0930 00:01:24.536283 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-a486-account-create-m77qk" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.276380 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nt96d"] Sep 30 00:01:26 crc kubenswrapper[4922]: E0930 00:01:26.276916 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1d4f130-a86e-4fca-aa7b-79013b3b18a0" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.276936 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1d4f130-a86e-4fca-aa7b-79013b3b18a0" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: E0930 00:01:26.276961 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25d16583-e8ae-480d-8814-35e0051537f7" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.276972 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="25d16583-e8ae-480d-8814-35e0051537f7" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: E0930 00:01:26.277022 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9112b115-b748-4278-8a10-01ccbadf6c77" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.277034 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9112b115-b748-4278-8a10-01ccbadf6c77" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.277299 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1d4f130-a86e-4fca-aa7b-79013b3b18a0" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.277347 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9112b115-b748-4278-8a10-01ccbadf6c77" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.277360 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="25d16583-e8ae-480d-8814-35e0051537f7" containerName="mariadb-account-create" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.278484 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.280595 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.281115 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.281770 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lnsjz" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.292073 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nt96d"] Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.379164 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.379517 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-config-data\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.379656 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-scripts\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.379728 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fc4r\" (UniqueName: \"kubernetes.io/projected/4ea179b5-caac-40f8-87d1-a207140c8752-kube-api-access-6fc4r\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.482003 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fc4r\" (UniqueName: \"kubernetes.io/projected/4ea179b5-caac-40f8-87d1-a207140c8752-kube-api-access-6fc4r\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.482121 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.482195 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-config-data\") pod \"nova-cell0-conductor-db-sync-nt96d\" 
(UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.482321 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-scripts\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.488554 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.488726 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-config-data\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.489151 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-scripts\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.508244 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fc4r\" (UniqueName: \"kubernetes.io/projected/4ea179b5-caac-40f8-87d1-a207140c8752-kube-api-access-6fc4r\") pod \"nova-cell0-conductor-db-sync-nt96d\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:26 crc kubenswrapper[4922]: I0930 00:01:26.623282 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:27 crc kubenswrapper[4922]: I0930 00:01:27.163723 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nt96d"] Sep 30 00:01:27 crc kubenswrapper[4922]: I0930 00:01:27.567891 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nt96d" event={"ID":"4ea179b5-caac-40f8-87d1-a207140c8752","Type":"ContainerStarted","Data":"7813a14f6188ffd7ae61c55494fe3e2e1b2c902514958b3274892cce1d0e009f"} Sep 30 00:01:27 crc kubenswrapper[4922]: I0930 00:01:27.568224 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nt96d" event={"ID":"4ea179b5-caac-40f8-87d1-a207140c8752","Type":"ContainerStarted","Data":"109f4f88028bdab6a9ff0c0f43289ad0ffa04c0a96f8467f0ea4a89377a14fae"} Sep 30 00:01:27 crc kubenswrapper[4922]: I0930 00:01:27.595703 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-nt96d" podStartSLOduration=1.595679327 podStartE2EDuration="1.595679327s" podCreationTimestamp="2025-09-30 00:01:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:27.59093321 +0000 UTC m=+5691.901222063" watchObservedRunningTime="2025-09-30 00:01:27.595679327 +0000 UTC m=+5691.905968150" Sep 30 00:01:32 crc kubenswrapper[4922]: I0930 00:01:32.633043 4922 generic.go:334] "Generic (PLEG): container finished" podID="4ea179b5-caac-40f8-87d1-a207140c8752" containerID="7813a14f6188ffd7ae61c55494fe3e2e1b2c902514958b3274892cce1d0e009f" exitCode=0 Sep 30 00:01:32 crc kubenswrapper[4922]: I0930 00:01:32.633178 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nt96d" event={"ID":"4ea179b5-caac-40f8-87d1-a207140c8752","Type":"ContainerDied","Data":"7813a14f6188ffd7ae61c55494fe3e2e1b2c902514958b3274892cce1d0e009f"} Sep 30 00:01:33 crc kubenswrapper[4922]: I0930 00:01:33.954870 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.066276 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-scripts\") pod \"4ea179b5-caac-40f8-87d1-a207140c8752\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.066916 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fc4r\" (UniqueName: \"kubernetes.io/projected/4ea179b5-caac-40f8-87d1-a207140c8752-kube-api-access-6fc4r\") pod \"4ea179b5-caac-40f8-87d1-a207140c8752\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.066995 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-config-data\") pod \"4ea179b5-caac-40f8-87d1-a207140c8752\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.067035 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-combined-ca-bundle\") pod \"4ea179b5-caac-40f8-87d1-a207140c8752\" (UID: \"4ea179b5-caac-40f8-87d1-a207140c8752\") " Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.072812 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-scripts" (OuterVolumeSpecName: "scripts") pod "4ea179b5-caac-40f8-87d1-a207140c8752" (UID: "4ea179b5-caac-40f8-87d1-a207140c8752"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.075122 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ea179b5-caac-40f8-87d1-a207140c8752-kube-api-access-6fc4r" (OuterVolumeSpecName: "kube-api-access-6fc4r") pod "4ea179b5-caac-40f8-87d1-a207140c8752" (UID: "4ea179b5-caac-40f8-87d1-a207140c8752"). InnerVolumeSpecName "kube-api-access-6fc4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.090582 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ea179b5-caac-40f8-87d1-a207140c8752" (UID: "4ea179b5-caac-40f8-87d1-a207140c8752"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.093256 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-config-data" (OuterVolumeSpecName: "config-data") pod "4ea179b5-caac-40f8-87d1-a207140c8752" (UID: "4ea179b5-caac-40f8-87d1-a207140c8752"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.170249 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fc4r\" (UniqueName: \"kubernetes.io/projected/4ea179b5-caac-40f8-87d1-a207140c8752-kube-api-access-6fc4r\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.170298 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.170317 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.170333 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ea179b5-caac-40f8-87d1-a207140c8752-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.653377 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-nt96d" event={"ID":"4ea179b5-caac-40f8-87d1-a207140c8752","Type":"ContainerDied","Data":"109f4f88028bdab6a9ff0c0f43289ad0ffa04c0a96f8467f0ea4a89377a14fae"} Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.653711 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="109f4f88028bdab6a9ff0c0f43289ad0ffa04c0a96f8467f0ea4a89377a14fae" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.653460 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-nt96d" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.737590 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:01:34 crc kubenswrapper[4922]: E0930 00:01:34.738270 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ea179b5-caac-40f8-87d1-a207140c8752" containerName="nova-cell0-conductor-db-sync" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.738297 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ea179b5-caac-40f8-87d1-a207140c8752" containerName="nova-cell0-conductor-db-sync" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.738634 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ea179b5-caac-40f8-87d1-a207140c8752" containerName="nova-cell0-conductor-db-sync" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.739666 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.742509 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.742776 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lnsjz" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.754035 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.885809 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.885903 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.885922 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b5vt\" (UniqueName: \"kubernetes.io/projected/832e746b-286e-4134-8e71-448ee79cf1c5-kube-api-access-9b5vt\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.987729 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.987792 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b5vt\" (UniqueName: \"kubernetes.io/projected/832e746b-286e-4134-8e71-448ee79cf1c5-kube-api-access-9b5vt\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.987973 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.995568 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:34 crc kubenswrapper[4922]: I0930 00:01:34.995689 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:35 crc kubenswrapper[4922]: I0930 00:01:35.022368 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b5vt\" (UniqueName: \"kubernetes.io/projected/832e746b-286e-4134-8e71-448ee79cf1c5-kube-api-access-9b5vt\") pod \"nova-cell0-conductor-0\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:35 crc kubenswrapper[4922]: I0930 00:01:35.074179 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:35 crc kubenswrapper[4922]: I0930 00:01:35.407454 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:01:35 crc kubenswrapper[4922]: W0930 00:01:35.408561 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod832e746b_286e_4134_8e71_448ee79cf1c5.slice/crio-d931a2666c8efe798983819c008288cf0164449884f186b5e53c2ddf3e68e8a8 WatchSource:0}: Error finding container d931a2666c8efe798983819c008288cf0164449884f186b5e53c2ddf3e68e8a8: Status 404 returned error can't find the container with id d931a2666c8efe798983819c008288cf0164449884f186b5e53c2ddf3e68e8a8 Sep 30 00:01:35 crc kubenswrapper[4922]: I0930 00:01:35.668652 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"832e746b-286e-4134-8e71-448ee79cf1c5","Type":"ContainerStarted","Data":"df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe"} Sep 30 00:01:35 crc kubenswrapper[4922]: I0930 00:01:35.669005 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"832e746b-286e-4134-8e71-448ee79cf1c5","Type":"ContainerStarted","Data":"d931a2666c8efe798983819c008288cf0164449884f186b5e53c2ddf3e68e8a8"} Sep 30 00:01:35 crc kubenswrapper[4922]: I0930 00:01:35.669443 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:35 crc kubenswrapper[4922]: I0930 00:01:35.691044 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.691020415 podStartE2EDuration="1.691020415s" podCreationTimestamp="2025-09-30 00:01:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:35.687315474 +0000 UTC m=+5699.997604297" watchObservedRunningTime="2025-09-30 00:01:35.691020415 +0000 UTC m=+5700.001309248" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.119705 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.670187 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-vl9l2"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.671946 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.676842 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.679473 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.682928 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vl9l2"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.810823 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.812107 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.814032 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.814083 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fblvs\" (UniqueName: \"kubernetes.io/projected/b6cbedef-764b-476b-a191-00443706e40d-kube-api-access-fblvs\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.814108 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-scripts\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.814162 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-config-data\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.821288 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.832307 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.888568 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.890319 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.902127 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.908528 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.910027 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.914688 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.916568 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.916620 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-config-data\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.916640 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wslhq\" (UniqueName: \"kubernetes.io/projected/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-kube-api-access-wslhq\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.916786 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.916835 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fblvs\" (UniqueName: \"kubernetes.io/projected/b6cbedef-764b-476b-a191-00443706e40d-kube-api-access-fblvs\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.916864 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-scripts\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.916901 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.926741 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-config-data\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.927061 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.927101 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-scripts\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.944989 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fblvs\" (UniqueName: \"kubernetes.io/projected/b6cbedef-764b-476b-a191-00443706e40d-kube-api-access-fblvs\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.946525 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.967784 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-vl9l2\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.990436 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d79b984cc-pjwcs"] Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.991838 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:40 crc kubenswrapper[4922]: I0930 00:01:40.999529 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.000153 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.001075 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.006385 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.016483 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019023 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7a80858-45ae-47af-961b-b88bff637535-logs\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019073 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-config-data\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019117 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx72l\" (UniqueName: \"kubernetes.io/projected/e7a80858-45ae-47af-961b-b88bff637535-kube-api-access-tx72l\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019185 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-config-data\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019203 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p7ts\" (UniqueName: \"kubernetes.io/projected/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-kube-api-access-6p7ts\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019260 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019310 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019348 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019378 4922 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.019421 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wslhq\" (UniqueName: \"kubernetes.io/projected/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-kube-api-access-wslhq\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.025081 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d79b984cc-pjwcs"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.025592 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.039240 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wslhq\" (UniqueName: \"kubernetes.io/projected/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-kube-api-access-wslhq\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.060586 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121259 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-config-data\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121626 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p7ts\" (UniqueName: \"kubernetes.io/projected/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-kube-api-access-6p7ts\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121662 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121700 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121725 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-config\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121749 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkbps\" (UniqueName: \"kubernetes.io/projected/1c84a740-1f70-400d-a56a-ac889071f45e-kube-api-access-qkbps\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121768 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-nb\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121793 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-dns-svc\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121814 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7a80858-45ae-47af-961b-b88bff637535-logs\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121836 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-config-data\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121854 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx72l\" (UniqueName: \"kubernetes.io/projected/e7a80858-45ae-47af-961b-b88bff637535-kube-api-access-tx72l\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121873 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb32e6c9-94c8-4b0b-855a-2635717ff719-logs\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121893 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-config-data\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121915 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7g67\" (UniqueName: \"kubernetes.io/projected/bb32e6c9-94c8-4b0b-855a-2635717ff719-kube-api-access-x7g67\") pod \"nova-api-0\" 
(UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121936 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-sb\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.121954 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.126222 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-config-data\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.128023 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7a80858-45ae-47af-961b-b88bff637535-logs\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.130204 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.141942 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.142489 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-config-data\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.143763 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.146676 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx72l\" (UniqueName: \"kubernetes.io/projected/e7a80858-45ae-47af-961b-b88bff637535-kube-api-access-tx72l\") pod \"nova-metadata-0\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.148085 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p7ts\" (UniqueName: \"kubernetes.io/projected/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-kube-api-access-6p7ts\") pod \"nova-scheduler-0\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.223678 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkbps\" (UniqueName: \"kubernetes.io/projected/1c84a740-1f70-400d-a56a-ac889071f45e-kube-api-access-qkbps\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.223752 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-nb\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.223819 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-dns-svc\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.223919 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb32e6c9-94c8-4b0b-855a-2635717ff719-logs\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.223958 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-config-data\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.224011 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7g67\" (UniqueName: \"kubernetes.io/projected/bb32e6c9-94c8-4b0b-855a-2635717ff719-kube-api-access-x7g67\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.224063 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-sb\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.224100 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.224299 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-config\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.225817 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb32e6c9-94c8-4b0b-855a-2635717ff719-logs\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.225954 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-nb\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.225979 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-dns-svc\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.226297 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-sb\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.226304 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-config\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.229844 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.238789 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkbps\" (UniqueName: \"kubernetes.io/projected/1c84a740-1f70-400d-a56a-ac889071f45e-kube-api-access-qkbps\") pod \"dnsmasq-dns-d79b984cc-pjwcs\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.238886 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-config-data\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.247503 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7g67\" (UniqueName: \"kubernetes.io/projected/bb32e6c9-94c8-4b0b-855a-2635717ff719-kube-api-access-x7g67\") pod \"nova-api-0\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.316608 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.409432 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.420894 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.444259 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-vl9l2"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.461162 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.592960 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.736335 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.740212 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vl9l2" event={"ID":"b6cbedef-764b-476b-a191-00443706e40d","Type":"ContainerStarted","Data":"f29183db1b8e1156b5b763de87dfc3c18da749229f3a705e89a995b5dc99feb6"} Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.740273 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vl9l2" event={"ID":"b6cbedef-764b-476b-a191-00443706e40d","Type":"ContainerStarted","Data":"95ddfd5ddd48de8c733fd7c91f9317caaf12e0bd3ec2561c31a5d1289677f7be"} Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.742372 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"21a2e46f-ece9-4faa-bb2e-d040c4501c1d","Type":"ContainerStarted","Data":"8946e0e4569b546e0fbc29faecf043a10d9fd5558293900510d9f5106bcfe77f"} Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.771437 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-vl9l2" podStartSLOduration=1.771420668 podStartE2EDuration="1.771420668s" podCreationTimestamp="2025-09-30 00:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:41.759322189 +0000 UTC m=+5706.069611002" watchObservedRunningTime="2025-09-30 00:01:41.771420668 +0000 UTC m=+5706.081709481" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.846226 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.933632 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.965044 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d79b984cc-pjwcs"] Sep 30 00:01:41 crc kubenswrapper[4922]: W0930 00:01:41.983603 4922 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c84a740_1f70_400d_a56a_ac889071f45e.slice/crio-14b0db0c8cfaa0a047cf39948678f25605ab352b22dcc440d4ce03b7ab7b9ceb WatchSource:0}: Error finding container 14b0db0c8cfaa0a047cf39948678f25605ab352b22dcc440d4ce03b7ab7b9ceb: Status 404 returned error can't find the container with id 14b0db0c8cfaa0a047cf39948678f25605ab352b22dcc440d4ce03b7ab7b9ceb Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.984664 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmp5d"] Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.985973 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.993378 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 00:01:41 crc kubenswrapper[4922]: I0930 00:01:41.993984 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.004724 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmp5d"] Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.147584 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-scripts\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.147641 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.147855 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5r7p\" (UniqueName: \"kubernetes.io/projected/9539613f-19af-430f-bcbc-4dd019ba71bf-kube-api-access-w5r7p\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.147979 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-config-data\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.249206 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-scripts\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.249250 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.249301 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5r7p\" (UniqueName: \"kubernetes.io/projected/9539613f-19af-430f-bcbc-4dd019ba71bf-kube-api-access-w5r7p\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.249346 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-config-data\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.265066 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.265529 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-scripts\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.265837 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-config-data\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.267553 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5r7p\" (UniqueName: \"kubernetes.io/projected/9539613f-19af-430f-bcbc-4dd019ba71bf-kube-api-access-w5r7p\") pod \"nova-cell1-conductor-db-sync-bmp5d\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.277095 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.734667 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmp5d"] Sep 30 00:01:42 crc kubenswrapper[4922]: W0930 00:01:42.736850 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9539613f_19af_430f_bcbc_4dd019ba71bf.slice/crio-b8b9e87494981fbb0bac24bcfef7f711090df1c27ae8dbf942871366e848e5e4 WatchSource:0}: Error finding container b8b9e87494981fbb0bac24bcfef7f711090df1c27ae8dbf942871366e848e5e4: Status 404 returned error can't find the container with id b8b9e87494981fbb0bac24bcfef7f711090df1c27ae8dbf942871366e848e5e4 Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.751255 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e7a80858-45ae-47af-961b-b88bff637535","Type":"ContainerStarted","Data":"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.751494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e7a80858-45ae-47af-961b-b88bff637535","Type":"ContainerStarted","Data":"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.751504 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e7a80858-45ae-47af-961b-b88bff637535","Type":"ContainerStarted","Data":"8528b38cf742d6bc6ccace475e93a55faadd1e2ef3bc0b7d34061b7840540f9b"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.753697 4922 generic.go:334] "Generic (PLEG): container finished" podID="1c84a740-1f70-400d-a56a-ac889071f45e" containerID="0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306" exitCode=0 Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.753754 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" event={"ID":"1c84a740-1f70-400d-a56a-ac889071f45e","Type":"ContainerDied","Data":"0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.753770 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" event={"ID":"1c84a740-1f70-400d-a56a-ac889071f45e","Type":"ContainerStarted","Data":"14b0db0c8cfaa0a047cf39948678f25605ab352b22dcc440d4ce03b7ab7b9ceb"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.762565 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" event={"ID":"9539613f-19af-430f-bcbc-4dd019ba71bf","Type":"ContainerStarted","Data":"b8b9e87494981fbb0bac24bcfef7f711090df1c27ae8dbf942871366e848e5e4"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.773066 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"21a2e46f-ece9-4faa-bb2e-d040c4501c1d","Type":"ContainerStarted","Data":"cc9707d0d49c0441fa34d40bf19b9503f73b99d595caf2f4c6458d520dd7ea36"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.784213 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.784196716 podStartE2EDuration="2.784196716s" podCreationTimestamp="2025-09-30 00:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:42.77296916 +0000 UTC m=+5707.083257973" watchObservedRunningTime="2025-09-30 00:01:42.784196716 +0000 UTC m=+5707.094485529" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.792228 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e67ed73c-4cc5-4b00-b10e-9ed3273e51df","Type":"ContainerStarted","Data":"99c4fc21ab26e6e6a9e5bb34334e11d27189f9de9e8bd8c08a6d29794c3fce70"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.792272 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e67ed73c-4cc5-4b00-b10e-9ed3273e51df","Type":"ContainerStarted","Data":"c7583e8915f1e78288f461a27ccd6232c4ba6968fb6fd87935e189f6eb57b1db"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.804431 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb32e6c9-94c8-4b0b-855a-2635717ff719","Type":"ContainerStarted","Data":"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.804643 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb32e6c9-94c8-4b0b-855a-2635717ff719","Type":"ContainerStarted","Data":"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.804708 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb32e6c9-94c8-4b0b-855a-2635717ff719","Type":"ContainerStarted","Data":"2ad242326ddc2fae4fa422c79ca5fab36cc6f03a49aa41063eccf5bd8535f15c"} Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.820669 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.820634666 podStartE2EDuration="2.820634666s" podCreationTimestamp="2025-09-30 00:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:42.80744989 +0000 UTC m=+5707.117738703" watchObservedRunningTime="2025-09-30 00:01:42.820634666 +0000 UTC m=+5707.130923479" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.836597 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.836579809 podStartE2EDuration="2.836579809s" podCreationTimestamp="2025-09-30 00:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:42.834328973 +0000 UTC m=+5707.144617786" watchObservedRunningTime="2025-09-30 00:01:42.836579809 +0000 UTC m=+5707.146868622" Sep 30 00:01:42 crc kubenswrapper[4922]: I0930 00:01:42.851038 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.8510168350000002 podStartE2EDuration="2.851016835s" podCreationTimestamp="2025-09-30 00:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:42.849859157 +0000 UTC m=+5707.160147970" watchObservedRunningTime="2025-09-30 00:01:42.851016835 +0000 UTC m=+5707.161305658" Sep 30 00:01:43 crc kubenswrapper[4922]: I0930 00:01:43.816876 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-conductor-db-sync-bmp5d" event={"ID":"9539613f-19af-430f-bcbc-4dd019ba71bf","Type":"ContainerStarted","Data":"c90e8e4b231775cd1c41b07b43a81edf1e083525c0c2b33155ba4a42d68d0c97"} Sep 30 00:01:43 crc kubenswrapper[4922]: I0930 00:01:43.822225 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" event={"ID":"1c84a740-1f70-400d-a56a-ac889071f45e","Type":"ContainerStarted","Data":"41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191"} Sep 30 00:01:43 crc kubenswrapper[4922]: I0930 00:01:43.928616 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" podStartSLOduration=3.928595892 podStartE2EDuration="3.928595892s" podCreationTimestamp="2025-09-30 00:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:43.922209794 +0000 UTC m=+5708.232498607" watchObservedRunningTime="2025-09-30 00:01:43.928595892 +0000 UTC m=+5708.238884705" Sep 30 00:01:43 crc kubenswrapper[4922]: I0930 00:01:43.932723 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" podStartSLOduration=2.932707153 podStartE2EDuration="2.932707153s" podCreationTimestamp="2025-09-30 00:01:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:43.837214887 +0000 UTC m=+5708.147503730" watchObservedRunningTime="2025-09-30 00:01:43.932707153 +0000 UTC m=+5708.242995966" Sep 30 00:01:44 crc kubenswrapper[4922]: I0930 00:01:44.831989 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:45 crc kubenswrapper[4922]: I0930 00:01:45.845265 4922 generic.go:334] "Generic (PLEG): container finished" podID="9539613f-19af-430f-bcbc-4dd019ba71bf" containerID="c90e8e4b231775cd1c41b07b43a81edf1e083525c0c2b33155ba4a42d68d0c97" exitCode=0 Sep 30 00:01:45 crc kubenswrapper[4922]: I0930 00:01:45.846891 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" event={"ID":"9539613f-19af-430f-bcbc-4dd019ba71bf","Type":"ContainerDied","Data":"c90e8e4b231775cd1c41b07b43a81edf1e083525c0c2b33155ba4a42d68d0c97"} Sep 30 00:01:46 crc kubenswrapper[4922]: I0930 00:01:46.144538 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:46 crc kubenswrapper[4922]: I0930 00:01:46.317867 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:01:46 crc kubenswrapper[4922]: I0930 00:01:46.317970 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:01:46 crc kubenswrapper[4922]: I0930 00:01:46.410239 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 00:01:46 crc kubenswrapper[4922]: I0930 00:01:46.861217 4922 generic.go:334] "Generic (PLEG): container finished" podID="b6cbedef-764b-476b-a191-00443706e40d" containerID="f29183db1b8e1156b5b763de87dfc3c18da749229f3a705e89a995b5dc99feb6" exitCode=0 Sep 30 00:01:46 crc kubenswrapper[4922]: I0930 00:01:46.861447 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vl9l2" 
event={"ID":"b6cbedef-764b-476b-a191-00443706e40d","Type":"ContainerDied","Data":"f29183db1b8e1156b5b763de87dfc3c18da749229f3a705e89a995b5dc99feb6"} Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.287353 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.464742 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5r7p\" (UniqueName: \"kubernetes.io/projected/9539613f-19af-430f-bcbc-4dd019ba71bf-kube-api-access-w5r7p\") pod \"9539613f-19af-430f-bcbc-4dd019ba71bf\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.464972 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-scripts\") pod \"9539613f-19af-430f-bcbc-4dd019ba71bf\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.465042 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-combined-ca-bundle\") pod \"9539613f-19af-430f-bcbc-4dd019ba71bf\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.465100 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-config-data\") pod \"9539613f-19af-430f-bcbc-4dd019ba71bf\" (UID: \"9539613f-19af-430f-bcbc-4dd019ba71bf\") " Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.480603 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-scripts" (OuterVolumeSpecName: "scripts") pod "9539613f-19af-430f-bcbc-4dd019ba71bf" (UID: "9539613f-19af-430f-bcbc-4dd019ba71bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.481064 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9539613f-19af-430f-bcbc-4dd019ba71bf-kube-api-access-w5r7p" (OuterVolumeSpecName: "kube-api-access-w5r7p") pod "9539613f-19af-430f-bcbc-4dd019ba71bf" (UID: "9539613f-19af-430f-bcbc-4dd019ba71bf"). InnerVolumeSpecName "kube-api-access-w5r7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.499361 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9539613f-19af-430f-bcbc-4dd019ba71bf" (UID: "9539613f-19af-430f-bcbc-4dd019ba71bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.505682 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-config-data" (OuterVolumeSpecName: "config-data") pod "9539613f-19af-430f-bcbc-4dd019ba71bf" (UID: "9539613f-19af-430f-bcbc-4dd019ba71bf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.567567 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.567614 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.567627 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9539613f-19af-430f-bcbc-4dd019ba71bf-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.567644 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5r7p\" (UniqueName: \"kubernetes.io/projected/9539613f-19af-430f-bcbc-4dd019ba71bf-kube-api-access-w5r7p\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.874164 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" event={"ID":"9539613f-19af-430f-bcbc-4dd019ba71bf","Type":"ContainerDied","Data":"b8b9e87494981fbb0bac24bcfef7f711090df1c27ae8dbf942871366e848e5e4"} Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.874258 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8b9e87494981fbb0bac24bcfef7f711090df1c27ae8dbf942871366e848e5e4" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.874194 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-bmp5d" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.971999 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:01:47 crc kubenswrapper[4922]: E0930 00:01:47.972565 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9539613f-19af-430f-bcbc-4dd019ba71bf" containerName="nova-cell1-conductor-db-sync" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.972586 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9539613f-19af-430f-bcbc-4dd019ba71bf" containerName="nova-cell1-conductor-db-sync" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.972834 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9539613f-19af-430f-bcbc-4dd019ba71bf" containerName="nova-cell1-conductor-db-sync" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.973717 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.976092 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 00:01:47 crc kubenswrapper[4922]: I0930 00:01:47.984884 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.077481 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zc4m\" (UniqueName: \"kubernetes.io/projected/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-kube-api-access-5zc4m\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.077639 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.077903 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.179651 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.179876 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zc4m\" (UniqueName: \"kubernetes.io/projected/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-kube-api-access-5zc4m\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.179943 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.187636 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.196335 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.197596 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zc4m\" (UniqueName: \"kubernetes.io/projected/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-kube-api-access-5zc4m\") pod \"nova-cell1-conductor-0\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.304532 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.307498 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.383088 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-scripts\") pod \"b6cbedef-764b-476b-a191-00443706e40d\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.383241 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fblvs\" (UniqueName: \"kubernetes.io/projected/b6cbedef-764b-476b-a191-00443706e40d-kube-api-access-fblvs\") pod \"b6cbedef-764b-476b-a191-00443706e40d\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.383265 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-combined-ca-bundle\") pod \"b6cbedef-764b-476b-a191-00443706e40d\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.383319 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-config-data\") pod \"b6cbedef-764b-476b-a191-00443706e40d\" (UID: \"b6cbedef-764b-476b-a191-00443706e40d\") " Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.387175 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cbedef-764b-476b-a191-00443706e40d-kube-api-access-fblvs" (OuterVolumeSpecName: "kube-api-access-fblvs") pod "b6cbedef-764b-476b-a191-00443706e40d" (UID: "b6cbedef-764b-476b-a191-00443706e40d"). InnerVolumeSpecName "kube-api-access-fblvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.388518 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-scripts" (OuterVolumeSpecName: "scripts") pod "b6cbedef-764b-476b-a191-00443706e40d" (UID: "b6cbedef-764b-476b-a191-00443706e40d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.408976 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-config-data" (OuterVolumeSpecName: "config-data") pod "b6cbedef-764b-476b-a191-00443706e40d" (UID: "b6cbedef-764b-476b-a191-00443706e40d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.410575 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6cbedef-764b-476b-a191-00443706e40d" (UID: "b6cbedef-764b-476b-a191-00443706e40d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.485588 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fblvs\" (UniqueName: \"kubernetes.io/projected/b6cbedef-764b-476b-a191-00443706e40d-kube-api-access-fblvs\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.485623 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.485639 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.485651 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6cbedef-764b-476b-a191-00443706e40d-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.770692 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:01:48 crc kubenswrapper[4922]: W0930 00:01:48.778176 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod284f3b0a_8c60_43d4_9ec8_40b2eaab29c6.slice/crio-f6ad361a030683beabdb0efdb5d6a9a5c5dc83d08c37d108758e5f0ae87b63ae WatchSource:0}: Error finding container f6ad361a030683beabdb0efdb5d6a9a5c5dc83d08c37d108758e5f0ae87b63ae: Status 404 returned error can't find the container with id f6ad361a030683beabdb0efdb5d6a9a5c5dc83d08c37d108758e5f0ae87b63ae Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.887685 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-vl9l2" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.887679 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-vl9l2" event={"ID":"b6cbedef-764b-476b-a191-00443706e40d","Type":"ContainerDied","Data":"95ddfd5ddd48de8c733fd7c91f9317caaf12e0bd3ec2561c31a5d1289677f7be"} Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.887802 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95ddfd5ddd48de8c733fd7c91f9317caaf12e0bd3ec2561c31a5d1289677f7be" Sep 30 00:01:48 crc kubenswrapper[4922]: I0930 00:01:48.890219 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6","Type":"ContainerStarted","Data":"f6ad361a030683beabdb0efdb5d6a9a5c5dc83d08c37d108758e5f0ae87b63ae"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.080021 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.080515 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e67ed73c-4cc5-4b00-b10e-9ed3273e51df" containerName="nova-scheduler-scheduler" containerID="cri-o://99c4fc21ab26e6e6a9e5bb34334e11d27189f9de9e8bd8c08a6d29794c3fce70" gracePeriod=30 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.098606 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.098808 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-log" containerID="cri-o://4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1" gracePeriod=30 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.098929 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-api" containerID="cri-o://994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777" gracePeriod=30 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.112772 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.112969 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-log" containerID="cri-o://b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604" gracePeriod=30 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.113099 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-metadata" containerID="cri-o://7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020" gracePeriod=30 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.708944 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.762192 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.821560 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-combined-ca-bundle\") pod \"e7a80858-45ae-47af-961b-b88bff637535\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.821634 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-config-data\") pod \"e7a80858-45ae-47af-961b-b88bff637535\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.821664 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx72l\" (UniqueName: \"kubernetes.io/projected/e7a80858-45ae-47af-961b-b88bff637535-kube-api-access-tx72l\") pod \"e7a80858-45ae-47af-961b-b88bff637535\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.821702 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7a80858-45ae-47af-961b-b88bff637535-logs\") pod \"e7a80858-45ae-47af-961b-b88bff637535\" (UID: \"e7a80858-45ae-47af-961b-b88bff637535\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.822466 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a80858-45ae-47af-961b-b88bff637535-logs" (OuterVolumeSpecName: "logs") pod "e7a80858-45ae-47af-961b-b88bff637535" (UID: "e7a80858-45ae-47af-961b-b88bff637535"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.829851 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7a80858-45ae-47af-961b-b88bff637535-kube-api-access-tx72l" (OuterVolumeSpecName: "kube-api-access-tx72l") pod "e7a80858-45ae-47af-961b-b88bff637535" (UID: "e7a80858-45ae-47af-961b-b88bff637535"). InnerVolumeSpecName "kube-api-access-tx72l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.851608 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-config-data" (OuterVolumeSpecName: "config-data") pod "e7a80858-45ae-47af-961b-b88bff637535" (UID: "e7a80858-45ae-47af-961b-b88bff637535"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.856687 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7a80858-45ae-47af-961b-b88bff637535" (UID: "e7a80858-45ae-47af-961b-b88bff637535"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.900734 4922 generic.go:334] "Generic (PLEG): container finished" podID="e67ed73c-4cc5-4b00-b10e-9ed3273e51df" containerID="99c4fc21ab26e6e6a9e5bb34334e11d27189f9de9e8bd8c08a6d29794c3fce70" exitCode=0 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.900808 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e67ed73c-4cc5-4b00-b10e-9ed3273e51df","Type":"ContainerDied","Data":"99c4fc21ab26e6e6a9e5bb34334e11d27189f9de9e8bd8c08a6d29794c3fce70"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.908121 4922 generic.go:334] "Generic (PLEG): container finished" podID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerID="994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777" exitCode=0 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.908149 4922 generic.go:334] "Generic (PLEG): container finished" podID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerID="4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1" exitCode=143 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.908194 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb32e6c9-94c8-4b0b-855a-2635717ff719","Type":"ContainerDied","Data":"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.908217 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb32e6c9-94c8-4b0b-855a-2635717ff719","Type":"ContainerDied","Data":"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.908227 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"bb32e6c9-94c8-4b0b-855a-2635717ff719","Type":"ContainerDied","Data":"2ad242326ddc2fae4fa422c79ca5fab36cc6f03a49aa41063eccf5bd8535f15c"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.908241 4922 scope.go:117] "RemoveContainer" containerID="994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.908326 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.912016 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7a80858-45ae-47af-961b-b88bff637535" containerID="7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020" exitCode=0 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.912039 4922 generic.go:334] "Generic (PLEG): container finished" podID="e7a80858-45ae-47af-961b-b88bff637535" containerID="b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604" exitCode=143 Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.912083 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e7a80858-45ae-47af-961b-b88bff637535","Type":"ContainerDied","Data":"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.912087 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.912104 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e7a80858-45ae-47af-961b-b88bff637535","Type":"ContainerDied","Data":"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.912113 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e7a80858-45ae-47af-961b-b88bff637535","Type":"ContainerDied","Data":"8528b38cf742d6bc6ccace475e93a55faadd1e2ef3bc0b7d34061b7840540f9b"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.913805 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6","Type":"ContainerStarted","Data":"83da8fb26fc5825bbd39d469e693b4ee3c3a1b5fbd7a3d1e7f4890a3246d0179"} Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.913962 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.916831 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.922632 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-config-data\") pod \"bb32e6c9-94c8-4b0b-855a-2635717ff719\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.922697 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-combined-ca-bundle\") pod \"bb32e6c9-94c8-4b0b-855a-2635717ff719\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.922739 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb32e6c9-94c8-4b0b-855a-2635717ff719-logs\") pod \"bb32e6c9-94c8-4b0b-855a-2635717ff719\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.922834 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7g67\" (UniqueName: \"kubernetes.io/projected/bb32e6c9-94c8-4b0b-855a-2635717ff719-kube-api-access-x7g67\") pod \"bb32e6c9-94c8-4b0b-855a-2635717ff719\" (UID: \"bb32e6c9-94c8-4b0b-855a-2635717ff719\") " Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.923191 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.923207 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7a80858-45ae-47af-961b-b88bff637535-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.923215 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx72l\" (UniqueName: \"kubernetes.io/projected/e7a80858-45ae-47af-961b-b88bff637535-kube-api-access-tx72l\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:49 crc 
kubenswrapper[4922]: I0930 00:01:49.923224 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7a80858-45ae-47af-961b-b88bff637535-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.924028 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb32e6c9-94c8-4b0b-855a-2635717ff719-logs" (OuterVolumeSpecName: "logs") pod "bb32e6c9-94c8-4b0b-855a-2635717ff719" (UID: "bb32e6c9-94c8-4b0b-855a-2635717ff719"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.927190 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb32e6c9-94c8-4b0b-855a-2635717ff719-kube-api-access-x7g67" (OuterVolumeSpecName: "kube-api-access-x7g67") pod "bb32e6c9-94c8-4b0b-855a-2635717ff719" (UID: "bb32e6c9-94c8-4b0b-855a-2635717ff719"). InnerVolumeSpecName "kube-api-access-x7g67". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.939328 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.939308626 podStartE2EDuration="2.939308626s" podCreationTimestamp="2025-09-30 00:01:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:49.932719373 +0000 UTC m=+5714.243008186" watchObservedRunningTime="2025-09-30 00:01:49.939308626 +0000 UTC m=+5714.249597439" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.940774 4922 scope.go:117] "RemoveContainer" containerID="4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.976711 4922 scope.go:117] "RemoveContainer" containerID="994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.981905 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb32e6c9-94c8-4b0b-855a-2635717ff719" (UID: "bb32e6c9-94c8-4b0b-855a-2635717ff719"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: E0930 00:01:49.988281 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777\": container with ID starting with 994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777 not found: ID does not exist" containerID="994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.988333 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777"} err="failed to get container status \"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777\": rpc error: code = NotFound desc = could not find container \"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777\": container with ID starting with 994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777 not found: ID does not exist" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.988374 4922 scope.go:117] "RemoveContainer" containerID="4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1" Sep 30 00:01:49 crc kubenswrapper[4922]: E0930 00:01:49.988694 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1\": container with ID starting with 4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1 not found: ID does not exist" containerID="4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.988728 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1"} err="failed to get container status \"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1\": rpc error: code = NotFound desc = could not find container \"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1\": container with ID starting with 4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1 not found: ID does not exist" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.988743 4922 scope.go:117] "RemoveContainer" containerID="994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.988982 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777"} err="failed to get container status \"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777\": rpc error: code = NotFound desc = could not find container \"994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777\": container with ID starting with 994e6caff4774d3837380b9686768be0cefcb043bd9164d8c26534a6466d9777 not found: ID does not exist" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.989003 4922 scope.go:117] "RemoveContainer" containerID="4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.989312 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1"} err="failed to get 
container status \"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1\": rpc error: code = NotFound desc = could not find container \"4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1\": container with ID starting with 4fde0a55532be8e9d5e30bd987e7f688034de2f66f1405bd69682c6e695ac4b1 not found: ID does not exist" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.989338 4922 scope.go:117] "RemoveContainer" containerID="7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.994672 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-config-data" (OuterVolumeSpecName: "config-data") pod "bb32e6c9-94c8-4b0b-855a-2635717ff719" (UID: "bb32e6c9-94c8-4b0b-855a-2635717ff719"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:49 crc kubenswrapper[4922]: I0930 00:01:49.995911 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.016270 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.020367 4922 scope.go:117] "RemoveContainer" containerID="b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.025162 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-combined-ca-bundle\") pod \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.025232 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-config-data\") pod \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.025276 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6p7ts\" (UniqueName: \"kubernetes.io/projected/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-kube-api-access-6p7ts\") pod \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\" (UID: \"e67ed73c-4cc5-4b00-b10e-9ed3273e51df\") " Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.026436 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.026462 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb32e6c9-94c8-4b0b-855a-2635717ff719-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.026475 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bb32e6c9-94c8-4b0b-855a-2635717ff719-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.026486 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7g67\" (UniqueName: \"kubernetes.io/projected/bb32e6c9-94c8-4b0b-855a-2635717ff719-kube-api-access-x7g67\") on node \"crc\" 
DevicePath \"\"" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.029447 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-kube-api-access-6p7ts" (OuterVolumeSpecName: "kube-api-access-6p7ts") pod "e67ed73c-4cc5-4b00-b10e-9ed3273e51df" (UID: "e67ed73c-4cc5-4b00-b10e-9ed3273e51df"). InnerVolumeSpecName "kube-api-access-6p7ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.029489 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.029921 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-metadata" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.029940 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-metadata" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.029958 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-api" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.029966 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-api" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.029980 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6cbedef-764b-476b-a191-00443706e40d" containerName="nova-manage" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.029985 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6cbedef-764b-476b-a191-00443706e40d" containerName="nova-manage" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.029999 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e67ed73c-4cc5-4b00-b10e-9ed3273e51df" containerName="nova-scheduler-scheduler" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030005 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e67ed73c-4cc5-4b00-b10e-9ed3273e51df" containerName="nova-scheduler-scheduler" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.030021 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-log" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030028 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-log" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.030043 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-log" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030049 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-log" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030209 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-log" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030227 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e67ed73c-4cc5-4b00-b10e-9ed3273e51df" containerName="nova-scheduler-scheduler" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030241 4922 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-log" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030251 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" containerName="nova-api-api" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030259 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6cbedef-764b-476b-a191-00443706e40d" containerName="nova-manage" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.030270 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7a80858-45ae-47af-961b-b88bff637535" containerName="nova-metadata-metadata" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.031302 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.034921 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.038857 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.057281 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e67ed73c-4cc5-4b00-b10e-9ed3273e51df" (UID: "e67ed73c-4cc5-4b00-b10e-9ed3273e51df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.062674 4922 scope.go:117] "RemoveContainer" containerID="7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.063097 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-config-data" (OuterVolumeSpecName: "config-data") pod "e67ed73c-4cc5-4b00-b10e-9ed3273e51df" (UID: "e67ed73c-4cc5-4b00-b10e-9ed3273e51df"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.063135 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020\": container with ID starting with 7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020 not found: ID does not exist" containerID="7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.063166 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020"} err="failed to get container status \"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020\": rpc error: code = NotFound desc = could not find container \"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020\": container with ID starting with 7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020 not found: ID does not exist" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.063186 4922 scope.go:117] "RemoveContainer" containerID="b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.063582 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604\": container with ID starting with b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604 not found: ID does not exist" containerID="b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.063628 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604"} err="failed to get container status \"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604\": rpc error: code = NotFound desc = could not find container \"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604\": container with ID starting with b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604 not found: ID does not exist" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.063643 4922 scope.go:117] "RemoveContainer" containerID="7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.063949 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020"} err="failed to get container status \"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020\": rpc error: code = NotFound desc = could not find container \"7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020\": container with ID starting with 7dfc6bd77bbb6391d26b0ff6931916923668106050a846458ee70477e76e3020 not found: ID does not exist" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.063973 4922 scope.go:117] "RemoveContainer" containerID="b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.064248 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604"} err="failed to get 
container status \"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604\": rpc error: code = NotFound desc = could not find container \"b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604\": container with ID starting with b750cf96627423f263aafc87cdced30b3ca1c0acadb64b1ada73c56388b10604 not found: ID does not exist" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.129609 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-config-data\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.129683 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hft7\" (UniqueName: \"kubernetes.io/projected/6e99fe60-fb50-446f-af9d-5649ac357b84-kube-api-access-7hft7\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.129736 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e99fe60-fb50-446f-af9d-5649ac357b84-logs\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.129758 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.129930 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.129943 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6p7ts\" (UniqueName: \"kubernetes.io/projected/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-kube-api-access-6p7ts\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.129954 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e67ed73c-4cc5-4b00-b10e-9ed3273e51df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.231256 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-config-data\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.231306 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hft7\" (UniqueName: \"kubernetes.io/projected/6e99fe60-fb50-446f-af9d-5649ac357b84-kube-api-access-7hft7\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.231351 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e99fe60-fb50-446f-af9d-5649ac357b84-logs\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.231367 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.232161 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e99fe60-fb50-446f-af9d-5649ac357b84-logs\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.236370 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.236823 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-config-data\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.254914 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hft7\" (UniqueName: \"kubernetes.io/projected/6e99fe60-fb50-446f-af9d-5649ac357b84-kube-api-access-7hft7\") pod \"nova-metadata-0\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.338548 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.349281 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.349361 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.374513 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.376921 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.381334 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.391039 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.433426 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb32e6c9-94c8-4b0b-855a-2635717ff719" path="/var/lib/kubelet/pods/bb32e6c9-94c8-4b0b-855a-2635717ff719/volumes" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.434205 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7a80858-45ae-47af-961b-b88bff637535" path="/var/lib/kubelet/pods/e7a80858-45ae-47af-961b-b88bff637535/volumes" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.537374 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmhbh\" (UniqueName: \"kubernetes.io/projected/4fcaefee-0655-4404-9c4e-a87e706f8666-kube-api-access-vmhbh\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.537751 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.538071 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fcaefee-0655-4404-9c4e-a87e706f8666-logs\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.538494 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-config-data\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: E0930 00:01:50.614965 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode67ed73c_4cc5_4b00_b10e_9ed3273e51df.slice/crio-c7583e8915f1e78288f461a27ccd6232c4ba6968fb6fd87935e189f6eb57b1db\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode67ed73c_4cc5_4b00_b10e_9ed3273e51df.slice\": RecentStats: unable to find data in memory cache]" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.640220 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.640332 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fcaefee-0655-4404-9c4e-a87e706f8666-logs\") pod \"nova-api-0\" (UID: 
\"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.640459 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-config-data\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.640501 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmhbh\" (UniqueName: \"kubernetes.io/projected/4fcaefee-0655-4404-9c4e-a87e706f8666-kube-api-access-vmhbh\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.640894 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fcaefee-0655-4404-9c4e-a87e706f8666-logs\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.645247 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-config-data\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.659757 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.660532 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmhbh\" (UniqueName: \"kubernetes.io/projected/4fcaefee-0655-4404-9c4e-a87e706f8666-kube-api-access-vmhbh\") pod \"nova-api-0\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.749354 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.806829 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: W0930 00:01:50.807242 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e99fe60_fb50_446f_af9d_5649ac357b84.slice/crio-f71e27e9538764cc230493e435249bb305f0654ca5907a6bfb79d8ddc17d4019 WatchSource:0}: Error finding container f71e27e9538764cc230493e435249bb305f0654ca5907a6bfb79d8ddc17d4019: Status 404 returned error can't find the container with id f71e27e9538764cc230493e435249bb305f0654ca5907a6bfb79d8ddc17d4019 Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.927465 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e99fe60-fb50-446f-af9d-5649ac357b84","Type":"ContainerStarted","Data":"f71e27e9538764cc230493e435249bb305f0654ca5907a6bfb79d8ddc17d4019"} Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.929608 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e67ed73c-4cc5-4b00-b10e-9ed3273e51df","Type":"ContainerDied","Data":"c7583e8915f1e78288f461a27ccd6232c4ba6968fb6fd87935e189f6eb57b1db"} Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.929662 4922 scope.go:117] "RemoveContainer" containerID="99c4fc21ab26e6e6a9e5bb34334e11d27189f9de9e8bd8c08a6d29794c3fce70" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.929671 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.961537 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.970008 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.977778 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.978988 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.984872 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 00:01:50 crc kubenswrapper[4922]: I0930 00:01:50.991934 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.145140 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.153637 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mqlq\" (UniqueName: \"kubernetes.io/projected/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-kube-api-access-6mqlq\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.153710 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-config-data\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.153735 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.156225 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.215134 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.255177 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mqlq\" (UniqueName: \"kubernetes.io/projected/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-kube-api-access-6mqlq\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.255257 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-config-data\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.255290 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.259885 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: 
I0930 00:01:51.260826 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-config-data\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.278217 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mqlq\" (UniqueName: \"kubernetes.io/projected/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-kube-api-access-6mqlq\") pod \"nova-scheduler-0\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.306347 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.437977 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.502148 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b4888cd9-xfvzx"] Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.502483 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" podUID="4a7e5135-ca78-409c-90cc-701121a9a777" containerName="dnsmasq-dns" containerID="cri-o://e46f49827c57bf4b871a31254d4c25941c91991f62f2cc02923230f00f3a58b7" gracePeriod=10 Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.849926 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.958540 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fcaefee-0655-4404-9c4e-a87e706f8666","Type":"ContainerStarted","Data":"84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f"} Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.958577 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fcaefee-0655-4404-9c4e-a87e706f8666","Type":"ContainerStarted","Data":"1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35"} Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.958587 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fcaefee-0655-4404-9c4e-a87e706f8666","Type":"ContainerStarted","Data":"d94080cafcfcaebafa2027ead77a30b89ed2bdcaecf2af21164ffcd620f604d7"} Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.975733 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a","Type":"ContainerStarted","Data":"1c5e63d5c96a71356dabcb3078621c5ccd290aee0f4b03d6428dc12ab729a8c5"} Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.978279 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e99fe60-fb50-446f-af9d-5649ac357b84","Type":"ContainerStarted","Data":"eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32"} Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.978326 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e99fe60-fb50-446f-af9d-5649ac357b84","Type":"ContainerStarted","Data":"0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da"} Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 
00:01:51.980457 4922 generic.go:334] "Generic (PLEG): container finished" podID="4a7e5135-ca78-409c-90cc-701121a9a777" containerID="e46f49827c57bf4b871a31254d4c25941c91991f62f2cc02923230f00f3a58b7" exitCode=0 Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.980539 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" event={"ID":"4a7e5135-ca78-409c-90cc-701121a9a777","Type":"ContainerDied","Data":"e46f49827c57bf4b871a31254d4c25941c91991f62f2cc02923230f00f3a58b7"} Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.990376 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:01:51 crc kubenswrapper[4922]: I0930 00:01:51.999745 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.9997273020000002 podStartE2EDuration="1.999727302s" podCreationTimestamp="2025-09-30 00:01:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:51.990004812 +0000 UTC m=+5716.300293625" watchObservedRunningTime="2025-09-30 00:01:51.999727302 +0000 UTC m=+5716.310016115" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.029832 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.029815255 podStartE2EDuration="3.029815255s" podCreationTimestamp="2025-09-30 00:01:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:52.0223119 +0000 UTC m=+5716.332600713" watchObservedRunningTime="2025-09-30 00:01:52.029815255 +0000 UTC m=+5716.340104068" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.085455 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.200975 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5vq9\" (UniqueName: \"kubernetes.io/projected/4a7e5135-ca78-409c-90cc-701121a9a777-kube-api-access-s5vq9\") pod \"4a7e5135-ca78-409c-90cc-701121a9a777\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.201286 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-config\") pod \"4a7e5135-ca78-409c-90cc-701121a9a777\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.201434 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-sb\") pod \"4a7e5135-ca78-409c-90cc-701121a9a777\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.201531 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-nb\") pod \"4a7e5135-ca78-409c-90cc-701121a9a777\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.201629 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-dns-svc\") pod \"4a7e5135-ca78-409c-90cc-701121a9a777\" (UID: \"4a7e5135-ca78-409c-90cc-701121a9a777\") " Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.212514 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a7e5135-ca78-409c-90cc-701121a9a777-kube-api-access-s5vq9" (OuterVolumeSpecName: "kube-api-access-s5vq9") pod "4a7e5135-ca78-409c-90cc-701121a9a777" (UID: "4a7e5135-ca78-409c-90cc-701121a9a777"). InnerVolumeSpecName "kube-api-access-s5vq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.255176 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4a7e5135-ca78-409c-90cc-701121a9a777" (UID: "4a7e5135-ca78-409c-90cc-701121a9a777"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.270465 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4a7e5135-ca78-409c-90cc-701121a9a777" (UID: "4a7e5135-ca78-409c-90cc-701121a9a777"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.272164 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4a7e5135-ca78-409c-90cc-701121a9a777" (UID: "4a7e5135-ca78-409c-90cc-701121a9a777"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.280933 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-config" (OuterVolumeSpecName: "config") pod "4a7e5135-ca78-409c-90cc-701121a9a777" (UID: "4a7e5135-ca78-409c-90cc-701121a9a777"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.304325 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.304375 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.304387 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.304407 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a7e5135-ca78-409c-90cc-701121a9a777-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.304418 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5vq9\" (UniqueName: \"kubernetes.io/projected/4a7e5135-ca78-409c-90cc-701121a9a777-kube-api-access-s5vq9\") on node \"crc\" DevicePath \"\"" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.432278 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e67ed73c-4cc5-4b00-b10e-9ed3273e51df" path="/var/lib/kubelet/pods/e67ed73c-4cc5-4b00-b10e-9ed3273e51df/volumes" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.993769 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" event={"ID":"4a7e5135-ca78-409c-90cc-701121a9a777","Type":"ContainerDied","Data":"fdc9b52d3a5387fa6b79c73b4e00c014a01a4dcb7d60e0cd18cef43d3a7e1716"} Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.995165 4922 scope.go:117] "RemoveContainer" containerID="e46f49827c57bf4b871a31254d4c25941c91991f62f2cc02923230f00f3a58b7" Sep 30 00:01:52 crc kubenswrapper[4922]: I0930 00:01:52.995497 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9b4888cd9-xfvzx" Sep 30 00:01:53 crc kubenswrapper[4922]: I0930 00:01:53.004949 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a","Type":"ContainerStarted","Data":"ebf264b1e4cf66becbf2c5cd39b387573c145ba47de2547c6308c16520789aa3"} Sep 30 00:01:53 crc kubenswrapper[4922]: I0930 00:01:53.038479 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b4888cd9-xfvzx"] Sep 30 00:01:53 crc kubenswrapper[4922]: I0930 00:01:53.042248 4922 scope.go:117] "RemoveContainer" containerID="3f86b17b36135db1ac391c4c27973ef75da08f863366e8cd28f20a8d44244a69" Sep 30 00:01:53 crc kubenswrapper[4922]: I0930 00:01:53.048761 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9b4888cd9-xfvzx"] Sep 30 00:01:53 crc kubenswrapper[4922]: I0930 00:01:53.050520 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.050495678 podStartE2EDuration="3.050495678s" podCreationTimestamp="2025-09-30 00:01:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:01:53.046825168 +0000 UTC m=+5717.357114021" watchObservedRunningTime="2025-09-30 00:01:53.050495678 +0000 UTC m=+5717.360784501" Sep 30 00:01:54 crc kubenswrapper[4922]: I0930 00:01:54.435587 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a7e5135-ca78-409c-90cc-701121a9a777" path="/var/lib/kubelet/pods/4a7e5135-ca78-409c-90cc-701121a9a777/volumes" Sep 30 00:01:55 crc kubenswrapper[4922]: I0930 00:01:55.350339 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:01:55 crc kubenswrapper[4922]: I0930 00:01:55.350378 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:01:56 crc kubenswrapper[4922]: I0930 00:01:56.307176 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.340651 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.845458 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-57nb4"] Sep 30 00:01:58 crc kubenswrapper[4922]: E0930 00:01:58.845819 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a7e5135-ca78-409c-90cc-701121a9a777" containerName="dnsmasq-dns" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.845836 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a7e5135-ca78-409c-90cc-701121a9a777" containerName="dnsmasq-dns" Sep 30 00:01:58 crc kubenswrapper[4922]: E0930 00:01:58.845852 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a7e5135-ca78-409c-90cc-701121a9a777" containerName="init" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.845859 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a7e5135-ca78-409c-90cc-701121a9a777" containerName="init" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.846047 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a7e5135-ca78-409c-90cc-701121a9a777" containerName="dnsmasq-dns" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.846692 4922 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.849172 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.849402 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.893625 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-57nb4"] Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.912801 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.912859 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.926124 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9frr\" (UniqueName: \"kubernetes.io/projected/fcef0ab0-3df5-4835-b938-9c49e92ae366-kube-api-access-l9frr\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.926179 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-config-data\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.926278 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-scripts\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:58 crc kubenswrapper[4922]: I0930 00:01:58.926359 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.027463 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9frr\" (UniqueName: \"kubernetes.io/projected/fcef0ab0-3df5-4835-b938-9c49e92ae366-kube-api-access-l9frr\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.027798 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-config-data\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.027856 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-scripts\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.027903 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.041339 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-scripts\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.041383 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-config-data\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.041339 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.048009 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9frr\" (UniqueName: \"kubernetes.io/projected/fcef0ab0-3df5-4835-b938-9c49e92ae366-kube-api-access-l9frr\") pod \"nova-cell1-cell-mapping-57nb4\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.165693 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:01:59 crc kubenswrapper[4922]: I0930 00:01:59.562844 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-57nb4"] Sep 30 00:02:00 crc kubenswrapper[4922]: I0930 00:02:00.083023 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57nb4" event={"ID":"fcef0ab0-3df5-4835-b938-9c49e92ae366","Type":"ContainerStarted","Data":"1f67d70f27862ed5c75ca62d1d3cdf78eb529fc104dee0d549963a7efbac033b"} Sep 30 00:02:00 crc kubenswrapper[4922]: I0930 00:02:00.083365 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57nb4" event={"ID":"fcef0ab0-3df5-4835-b938-9c49e92ae366","Type":"ContainerStarted","Data":"19660d1ce20413a9f5ff84796a56629a1f336148256dbd6b763a3baae35e2b3d"} Sep 30 00:02:00 crc kubenswrapper[4922]: I0930 00:02:00.102002 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-57nb4" podStartSLOduration=2.101969991 podStartE2EDuration="2.101969991s" podCreationTimestamp="2025-09-30 00:01:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:02:00.098144456 +0000 UTC m=+5724.408433309" watchObservedRunningTime="2025-09-30 00:02:00.101969991 +0000 UTC m=+5724.412258834" Sep 30 00:02:00 crc kubenswrapper[4922]: I0930 00:02:00.350276 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:02:00 crc kubenswrapper[4922]: I0930 00:02:00.350316 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:02:00 crc kubenswrapper[4922]: I0930 00:02:00.749566 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:02:00 crc kubenswrapper[4922]: I0930 00:02:00.749842 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:02:01 crc kubenswrapper[4922]: I0930 00:02:01.307120 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 00:02:01 crc kubenswrapper[4922]: I0930 00:02:01.342671 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 00:02:01 crc kubenswrapper[4922]: I0930 00:02:01.431683 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.68:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:02:01 crc kubenswrapper[4922]: I0930 00:02:01.431709 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.68:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:02:01 crc kubenswrapper[4922]: I0930 00:02:01.832668 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.69:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting 
headers)" Sep 30 00:02:01 crc kubenswrapper[4922]: I0930 00:02:01.832688 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.69:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:02:02 crc kubenswrapper[4922]: I0930 00:02:02.141347 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 00:02:05 crc kubenswrapper[4922]: I0930 00:02:05.159544 4922 generic.go:334] "Generic (PLEG): container finished" podID="fcef0ab0-3df5-4835-b938-9c49e92ae366" containerID="1f67d70f27862ed5c75ca62d1d3cdf78eb529fc104dee0d549963a7efbac033b" exitCode=0 Sep 30 00:02:05 crc kubenswrapper[4922]: I0930 00:02:05.159657 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57nb4" event={"ID":"fcef0ab0-3df5-4835-b938-9c49e92ae366","Type":"ContainerDied","Data":"1f67d70f27862ed5c75ca62d1d3cdf78eb529fc104dee0d549963a7efbac033b"} Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.555657 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.672949 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-scripts\") pod \"fcef0ab0-3df5-4835-b938-9c49e92ae366\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.673064 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-config-data\") pod \"fcef0ab0-3df5-4835-b938-9c49e92ae366\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.673225 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-combined-ca-bundle\") pod \"fcef0ab0-3df5-4835-b938-9c49e92ae366\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.673269 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9frr\" (UniqueName: \"kubernetes.io/projected/fcef0ab0-3df5-4835-b938-9c49e92ae366-kube-api-access-l9frr\") pod \"fcef0ab0-3df5-4835-b938-9c49e92ae366\" (UID: \"fcef0ab0-3df5-4835-b938-9c49e92ae366\") " Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.677698 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-scripts" (OuterVolumeSpecName: "scripts") pod "fcef0ab0-3df5-4835-b938-9c49e92ae366" (UID: "fcef0ab0-3df5-4835-b938-9c49e92ae366"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.678127 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcef0ab0-3df5-4835-b938-9c49e92ae366-kube-api-access-l9frr" (OuterVolumeSpecName: "kube-api-access-l9frr") pod "fcef0ab0-3df5-4835-b938-9c49e92ae366" (UID: "fcef0ab0-3df5-4835-b938-9c49e92ae366"). InnerVolumeSpecName "kube-api-access-l9frr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.699863 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcef0ab0-3df5-4835-b938-9c49e92ae366" (UID: "fcef0ab0-3df5-4835-b938-9c49e92ae366"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.701601 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-config-data" (OuterVolumeSpecName: "config-data") pod "fcef0ab0-3df5-4835-b938-9c49e92ae366" (UID: "fcef0ab0-3df5-4835-b938-9c49e92ae366"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.775997 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.776047 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9frr\" (UniqueName: \"kubernetes.io/projected/fcef0ab0-3df5-4835-b938-9c49e92ae366-kube-api-access-l9frr\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.776065 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:06 crc kubenswrapper[4922]: I0930 00:02:06.776078 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcef0ab0-3df5-4835-b938-9c49e92ae366-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.187827 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-57nb4" event={"ID":"fcef0ab0-3df5-4835-b938-9c49e92ae366","Type":"ContainerDied","Data":"19660d1ce20413a9f5ff84796a56629a1f336148256dbd6b763a3baae35e2b3d"} Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.187880 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19660d1ce20413a9f5ff84796a56629a1f336148256dbd6b763a3baae35e2b3d" Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.187930 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-57nb4" Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.329563 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.329844 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-log" containerID="cri-o://1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35" gracePeriod=30 Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.330128 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-api" containerID="cri-o://84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f" gracePeriod=30 Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.348506 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.348688 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" containerName="nova-scheduler-scheduler" containerID="cri-o://ebf264b1e4cf66becbf2c5cd39b387573c145ba47de2547c6308c16520789aa3" gracePeriod=30 Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.358542 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.358850 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-log" containerID="cri-o://0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da" gracePeriod=30 Sep 30 00:02:07 crc kubenswrapper[4922]: I0930 00:02:07.358917 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-metadata" containerID="cri-o://eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32" gracePeriod=30 Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.228978 4922 generic.go:334] "Generic (PLEG): container finished" podID="10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" containerID="ebf264b1e4cf66becbf2c5cd39b387573c145ba47de2547c6308c16520789aa3" exitCode=0 Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.229579 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a","Type":"ContainerDied","Data":"ebf264b1e4cf66becbf2c5cd39b387573c145ba47de2547c6308c16520789aa3"} Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.233013 4922 generic.go:334] "Generic (PLEG): container finished" podID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerID="0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da" exitCode=143 Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.233086 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e99fe60-fb50-446f-af9d-5649ac357b84","Type":"ContainerDied","Data":"0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da"} Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.235570 4922 generic.go:334] "Generic (PLEG): container finished" podID="4fcaefee-0655-4404-9c4e-a87e706f8666" 
containerID="1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35" exitCode=143 Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.235594 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fcaefee-0655-4404-9c4e-a87e706f8666","Type":"ContainerDied","Data":"1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35"} Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.538480 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.630635 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mqlq\" (UniqueName: \"kubernetes.io/projected/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-kube-api-access-6mqlq\") pod \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.630860 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-config-data\") pod \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.630929 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-combined-ca-bundle\") pod \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\" (UID: \"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a\") " Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.646837 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-kube-api-access-6mqlq" (OuterVolumeSpecName: "kube-api-access-6mqlq") pod "10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" (UID: "10b454f0-f8fc-4ae6-a7ed-5ce801f6058a"). InnerVolumeSpecName "kube-api-access-6mqlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.655926 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" (UID: "10b454f0-f8fc-4ae6-a7ed-5ce801f6058a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.657429 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-config-data" (OuterVolumeSpecName: "config-data") pod "10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" (UID: "10b454f0-f8fc-4ae6-a7ed-5ce801f6058a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.732947 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mqlq\" (UniqueName: \"kubernetes.io/projected/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-kube-api-access-6mqlq\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.732985 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:08 crc kubenswrapper[4922]: I0930 00:02:08.733001 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.249795 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"10b454f0-f8fc-4ae6-a7ed-5ce801f6058a","Type":"ContainerDied","Data":"1c5e63d5c96a71356dabcb3078621c5ccd290aee0f4b03d6428dc12ab729a8c5"} Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.249857 4922 scope.go:117] "RemoveContainer" containerID="ebf264b1e4cf66becbf2c5cd39b387573c145ba47de2547c6308c16520789aa3" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.249881 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.293645 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.311461 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.320853 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:02:09 crc kubenswrapper[4922]: E0930 00:02:09.321279 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" containerName="nova-scheduler-scheduler" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.321296 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" containerName="nova-scheduler-scheduler" Sep 30 00:02:09 crc kubenswrapper[4922]: E0930 00:02:09.321327 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcef0ab0-3df5-4835-b938-9c49e92ae366" containerName="nova-manage" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.321335 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcef0ab0-3df5-4835-b938-9c49e92ae366" containerName="nova-manage" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.321557 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" containerName="nova-scheduler-scheduler" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.321595 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcef0ab0-3df5-4835-b938-9c49e92ae366" containerName="nova-manage" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.322360 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.327593 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.337523 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.445426 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ql7q\" (UniqueName: \"kubernetes.io/projected/4182d7b7-7987-4b76-8325-f312ed9dff54-kube-api-access-5ql7q\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.445918 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-config-data\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.446012 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.547296 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ql7q\" (UniqueName: \"kubernetes.io/projected/4182d7b7-7987-4b76-8325-f312ed9dff54-kube-api-access-5ql7q\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.547440 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-config-data\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.547458 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.554677 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.562974 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-config-data\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.571157 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ql7q\" (UniqueName: 
\"kubernetes.io/projected/4182d7b7-7987-4b76-8325-f312ed9dff54-kube-api-access-5ql7q\") pod \"nova-scheduler-0\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " pod="openstack/nova-scheduler-0" Sep 30 00:02:09 crc kubenswrapper[4922]: I0930 00:02:09.651062 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:02:10 crc kubenswrapper[4922]: I0930 00:02:10.075211 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:02:10 crc kubenswrapper[4922]: W0930 00:02:10.077053 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4182d7b7_7987_4b76_8325_f312ed9dff54.slice/crio-4a6414f400761c9b6a3e26f977fda787c0d15dd2a6157e1418c1b8165387d76b WatchSource:0}: Error finding container 4a6414f400761c9b6a3e26f977fda787c0d15dd2a6157e1418c1b8165387d76b: Status 404 returned error can't find the container with id 4a6414f400761c9b6a3e26f977fda787c0d15dd2a6157e1418c1b8165387d76b Sep 30 00:02:10 crc kubenswrapper[4922]: I0930 00:02:10.260501 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4182d7b7-7987-4b76-8325-f312ed9dff54","Type":"ContainerStarted","Data":"bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5"} Sep 30 00:02:10 crc kubenswrapper[4922]: I0930 00:02:10.261473 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4182d7b7-7987-4b76-8325-f312ed9dff54","Type":"ContainerStarted","Data":"4a6414f400761c9b6a3e26f977fda787c0d15dd2a6157e1418c1b8165387d76b"} Sep 30 00:02:10 crc kubenswrapper[4922]: I0930 00:02:10.282049 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.282032425 podStartE2EDuration="1.282032425s" podCreationTimestamp="2025-09-30 00:02:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:02:10.277734059 +0000 UTC m=+5734.588022892" watchObservedRunningTime="2025-09-30 00:02:10.282032425 +0000 UTC m=+5734.592321238" Sep 30 00:02:10 crc kubenswrapper[4922]: I0930 00:02:10.440444 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10b454f0-f8fc-4ae6-a7ed-5ce801f6058a" path="/var/lib/kubelet/pods/10b454f0-f8fc-4ae6-a7ed-5ce801f6058a/volumes" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.017429 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.024536 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.075636 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hft7\" (UniqueName: \"kubernetes.io/projected/6e99fe60-fb50-446f-af9d-5649ac357b84-kube-api-access-7hft7\") pod \"6e99fe60-fb50-446f-af9d-5649ac357b84\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.075690 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e99fe60-fb50-446f-af9d-5649ac357b84-logs\") pod \"6e99fe60-fb50-446f-af9d-5649ac357b84\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.075735 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-combined-ca-bundle\") pod \"4fcaefee-0655-4404-9c4e-a87e706f8666\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.075768 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-config-data\") pod \"6e99fe60-fb50-446f-af9d-5649ac357b84\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.075788 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-config-data\") pod \"4fcaefee-0655-4404-9c4e-a87e706f8666\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.075906 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmhbh\" (UniqueName: \"kubernetes.io/projected/4fcaefee-0655-4404-9c4e-a87e706f8666-kube-api-access-vmhbh\") pod \"4fcaefee-0655-4404-9c4e-a87e706f8666\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.076043 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-combined-ca-bundle\") pod \"6e99fe60-fb50-446f-af9d-5649ac357b84\" (UID: \"6e99fe60-fb50-446f-af9d-5649ac357b84\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.076113 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fcaefee-0655-4404-9c4e-a87e706f8666-logs\") pod \"4fcaefee-0655-4404-9c4e-a87e706f8666\" (UID: \"4fcaefee-0655-4404-9c4e-a87e706f8666\") " Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.082286 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fcaefee-0655-4404-9c4e-a87e706f8666-logs" (OuterVolumeSpecName: "logs") pod "4fcaefee-0655-4404-9c4e-a87e706f8666" (UID: "4fcaefee-0655-4404-9c4e-a87e706f8666"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.082970 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e99fe60-fb50-446f-af9d-5649ac357b84-logs" (OuterVolumeSpecName: "logs") pod "6e99fe60-fb50-446f-af9d-5649ac357b84" (UID: "6e99fe60-fb50-446f-af9d-5649ac357b84"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.083018 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fcaefee-0655-4404-9c4e-a87e706f8666-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.084079 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fcaefee-0655-4404-9c4e-a87e706f8666-kube-api-access-vmhbh" (OuterVolumeSpecName: "kube-api-access-vmhbh") pod "4fcaefee-0655-4404-9c4e-a87e706f8666" (UID: "4fcaefee-0655-4404-9c4e-a87e706f8666"). InnerVolumeSpecName "kube-api-access-vmhbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.106300 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-config-data" (OuterVolumeSpecName: "config-data") pod "6e99fe60-fb50-446f-af9d-5649ac357b84" (UID: "6e99fe60-fb50-446f-af9d-5649ac357b84"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.108742 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e99fe60-fb50-446f-af9d-5649ac357b84-kube-api-access-7hft7" (OuterVolumeSpecName: "kube-api-access-7hft7") pod "6e99fe60-fb50-446f-af9d-5649ac357b84" (UID: "6e99fe60-fb50-446f-af9d-5649ac357b84"). InnerVolumeSpecName "kube-api-access-7hft7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.117689 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-config-data" (OuterVolumeSpecName: "config-data") pod "4fcaefee-0655-4404-9c4e-a87e706f8666" (UID: "4fcaefee-0655-4404-9c4e-a87e706f8666"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.122942 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e99fe60-fb50-446f-af9d-5649ac357b84" (UID: "6e99fe60-fb50-446f-af9d-5649ac357b84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.136631 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fcaefee-0655-4404-9c4e-a87e706f8666" (UID: "4fcaefee-0655-4404-9c4e-a87e706f8666"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.188667 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.188720 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hft7\" (UniqueName: \"kubernetes.io/projected/6e99fe60-fb50-446f-af9d-5649ac357b84-kube-api-access-7hft7\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.188737 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e99fe60-fb50-446f-af9d-5649ac357b84-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.188754 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.188773 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e99fe60-fb50-446f-af9d-5649ac357b84-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.188787 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fcaefee-0655-4404-9c4e-a87e706f8666-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.188800 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmhbh\" (UniqueName: \"kubernetes.io/projected/4fcaefee-0655-4404-9c4e-a87e706f8666-kube-api-access-vmhbh\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.272797 4922 generic.go:334] "Generic (PLEG): container finished" podID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerID="eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32" exitCode=0 Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.272914 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.272923 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e99fe60-fb50-446f-af9d-5649ac357b84","Type":"ContainerDied","Data":"eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32"} Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.273018 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e99fe60-fb50-446f-af9d-5649ac357b84","Type":"ContainerDied","Data":"f71e27e9538764cc230493e435249bb305f0654ca5907a6bfb79d8ddc17d4019"} Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.273057 4922 scope.go:117] "RemoveContainer" containerID="eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.291271 4922 generic.go:334] "Generic (PLEG): container finished" podID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerID="84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f" exitCode=0 Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.291353 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fcaefee-0655-4404-9c4e-a87e706f8666","Type":"ContainerDied","Data":"84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f"} Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.291363 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.291443 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fcaefee-0655-4404-9c4e-a87e706f8666","Type":"ContainerDied","Data":"d94080cafcfcaebafa2027ead77a30b89ed2bdcaecf2af21164ffcd620f604d7"} Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.325642 4922 scope.go:117] "RemoveContainer" containerID="0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.328988 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.340168 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.371775 4922 scope.go:117] "RemoveContainer" containerID="eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32" Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.379017 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32\": container with ID starting with eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32 not found: ID does not exist" containerID="eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.379078 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32"} err="failed to get container status \"eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32\": rpc error: code = NotFound desc = could not find container \"eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32\": container with ID starting with eced7a12cd72e790e743c0b80e0d4bf2085b0bcdc7e0fc198e8a6c410d13ae32 not found: ID 
does not exist" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.379112 4922 scope.go:117] "RemoveContainer" containerID="0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da" Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.379882 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da\": container with ID starting with 0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da not found: ID does not exist" containerID="0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.379915 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da"} err="failed to get container status \"0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da\": rpc error: code = NotFound desc = could not find container \"0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da\": container with ID starting with 0c5f34842bb7a4333d7f1007121fd7eb62d7b7bce58c5567379ab45a8e78f4da not found: ID does not exist" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.379945 4922 scope.go:117] "RemoveContainer" containerID="84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.382481 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.383019 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-log" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383043 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-log" Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.383066 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-api" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383074 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-api" Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.383102 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-log" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383111 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-log" Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.383129 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-metadata" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383137 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-metadata" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383362 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-api" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383383 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" 
containerName="nova-metadata-log" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383421 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" containerName="nova-api-log" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.383448 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" containerName="nova-metadata-metadata" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.385089 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.398917 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.399093 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.416933 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.423229 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.425009 4922 scope.go:117] "RemoveContainer" containerID="1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.430376 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.432849 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.435240 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.441111 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.452185 4922 scope.go:117] "RemoveContainer" containerID="84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f" Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.452842 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f\": container with ID starting with 84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f not found: ID does not exist" containerID="84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.452878 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f"} err="failed to get container status \"84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f\": rpc error: code = NotFound desc = could not find container \"84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f\": container with ID starting with 84f288970101aee5f3df8389f2dbb1e94221ad7021f5b991727b0799ae6e895f not found: ID does not exist" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.452902 4922 scope.go:117] "RemoveContainer" containerID="1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35" Sep 30 00:02:11 crc kubenswrapper[4922]: E0930 00:02:11.453351 4922 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35\": container with ID starting with 1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35 not found: ID does not exist" containerID="1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.453509 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35"} err="failed to get container status \"1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35\": rpc error: code = NotFound desc = could not find container \"1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35\": container with ID starting with 1bd81299000a57c9eaaabc16518886c0c16656e0b1331b780faff515d4dfff35 not found: ID does not exist" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.494496 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeaba508-3582-4908-9e9c-3dbd53460fe0-logs\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.494545 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.494816 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.494854 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-config-data\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.494879 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-config-data\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.494975 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxms2\" (UniqueName: \"kubernetes.io/projected/a3610ce4-5db9-417f-9998-f1fa664fcfa8-kube-api-access-gxms2\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.495061 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3610ce4-5db9-417f-9998-f1fa664fcfa8-logs\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 
30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.495146 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdmvm\" (UniqueName: \"kubernetes.io/projected/eeaba508-3582-4908-9e9c-3dbd53460fe0-kube-api-access-cdmvm\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.597348 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3610ce4-5db9-417f-9998-f1fa664fcfa8-logs\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.597646 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdmvm\" (UniqueName: \"kubernetes.io/projected/eeaba508-3582-4908-9e9c-3dbd53460fe0-kube-api-access-cdmvm\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.597863 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3610ce4-5db9-417f-9998-f1fa664fcfa8-logs\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.598103 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.598239 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeaba508-3582-4908-9e9c-3dbd53460fe0-logs\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.598514 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.598619 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-config-data\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.598715 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-config-data\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.598809 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeaba508-3582-4908-9e9c-3dbd53460fe0-logs\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc 
kubenswrapper[4922]: I0930 00:02:11.599031 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxms2\" (UniqueName: \"kubernetes.io/projected/a3610ce4-5db9-417f-9998-f1fa664fcfa8-kube-api-access-gxms2\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.602602 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.602927 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.603589 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-config-data\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.604727 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-config-data\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.615823 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxms2\" (UniqueName: \"kubernetes.io/projected/a3610ce4-5db9-417f-9998-f1fa664fcfa8-kube-api-access-gxms2\") pod \"nova-api-0\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " pod="openstack/nova-api-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.616945 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdmvm\" (UniqueName: \"kubernetes.io/projected/eeaba508-3582-4908-9e9c-3dbd53460fe0-kube-api-access-cdmvm\") pod \"nova-metadata-0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.715221 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:02:11 crc kubenswrapper[4922]: I0930 00:02:11.758758 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:02:12 crc kubenswrapper[4922]: I0930 00:02:12.032079 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:02:12 crc kubenswrapper[4922]: W0930 00:02:12.041001 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeeaba508_3582_4908_9e9c_3dbd53460fe0.slice/crio-cd6a025129f42bcbd98b440942de1cf06f0418a2f193c29e6f9b31714909e6e2 WatchSource:0}: Error finding container cd6a025129f42bcbd98b440942de1cf06f0418a2f193c29e6f9b31714909e6e2: Status 404 returned error can't find the container with id cd6a025129f42bcbd98b440942de1cf06f0418a2f193c29e6f9b31714909e6e2 Sep 30 00:02:12 crc kubenswrapper[4922]: I0930 00:02:12.298868 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:02:12 crc kubenswrapper[4922]: W0930 00:02:12.305293 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3610ce4_5db9_417f_9998_f1fa664fcfa8.slice/crio-5940e9e1e34bcb8ec116b88e3aa23583449b1e1a54abc55b29742dff5c8acbd3 WatchSource:0}: Error finding container 5940e9e1e34bcb8ec116b88e3aa23583449b1e1a54abc55b29742dff5c8acbd3: Status 404 returned error can't find the container with id 5940e9e1e34bcb8ec116b88e3aa23583449b1e1a54abc55b29742dff5c8acbd3 Sep 30 00:02:12 crc kubenswrapper[4922]: I0930 00:02:12.306941 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eeaba508-3582-4908-9e9c-3dbd53460fe0","Type":"ContainerStarted","Data":"2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa"} Sep 30 00:02:12 crc kubenswrapper[4922]: I0930 00:02:12.306994 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eeaba508-3582-4908-9e9c-3dbd53460fe0","Type":"ContainerStarted","Data":"cd6a025129f42bcbd98b440942de1cf06f0418a2f193c29e6f9b31714909e6e2"} Sep 30 00:02:12 crc kubenswrapper[4922]: I0930 00:02:12.432026 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fcaefee-0655-4404-9c4e-a87e706f8666" path="/var/lib/kubelet/pods/4fcaefee-0655-4404-9c4e-a87e706f8666/volumes" Sep 30 00:02:12 crc kubenswrapper[4922]: I0930 00:02:12.433027 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e99fe60-fb50-446f-af9d-5649ac357b84" path="/var/lib/kubelet/pods/6e99fe60-fb50-446f-af9d-5649ac357b84/volumes" Sep 30 00:02:13 crc kubenswrapper[4922]: I0930 00:02:13.321424 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eeaba508-3582-4908-9e9c-3dbd53460fe0","Type":"ContainerStarted","Data":"fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e"} Sep 30 00:02:13 crc kubenswrapper[4922]: I0930 00:02:13.325964 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3610ce4-5db9-417f-9998-f1fa664fcfa8","Type":"ContainerStarted","Data":"8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed"} Sep 30 00:02:13 crc kubenswrapper[4922]: I0930 00:02:13.325995 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3610ce4-5db9-417f-9998-f1fa664fcfa8","Type":"ContainerStarted","Data":"5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5"} Sep 30 00:02:13 crc kubenswrapper[4922]: I0930 00:02:13.326016 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-api-0" event={"ID":"a3610ce4-5db9-417f-9998-f1fa664fcfa8","Type":"ContainerStarted","Data":"5940e9e1e34bcb8ec116b88e3aa23583449b1e1a54abc55b29742dff5c8acbd3"} Sep 30 00:02:13 crc kubenswrapper[4922]: I0930 00:02:13.375594 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.375563142 podStartE2EDuration="2.375563142s" podCreationTimestamp="2025-09-30 00:02:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:02:13.358178583 +0000 UTC m=+5737.668467426" watchObservedRunningTime="2025-09-30 00:02:13.375563142 +0000 UTC m=+5737.685851995" Sep 30 00:02:13 crc kubenswrapper[4922]: I0930 00:02:13.390532 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.390504 podStartE2EDuration="2.390504s" podCreationTimestamp="2025-09-30 00:02:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:02:13.387051415 +0000 UTC m=+5737.697340268" watchObservedRunningTime="2025-09-30 00:02:13.390504 +0000 UTC m=+5737.700792843" Sep 30 00:02:14 crc kubenswrapper[4922]: I0930 00:02:14.651742 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 00:02:16 crc kubenswrapper[4922]: I0930 00:02:16.716647 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:02:16 crc kubenswrapper[4922]: I0930 00:02:16.718524 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:02:19 crc kubenswrapper[4922]: I0930 00:02:19.651816 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 00:02:19 crc kubenswrapper[4922]: I0930 00:02:19.675832 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 00:02:20 crc kubenswrapper[4922]: I0930 00:02:20.464967 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 00:02:21 crc kubenswrapper[4922]: I0930 00:02:21.716777 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:02:21 crc kubenswrapper[4922]: I0930 00:02:21.717222 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:02:21 crc kubenswrapper[4922]: I0930 00:02:21.760465 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:02:21 crc kubenswrapper[4922]: I0930 00:02:21.760541 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:02:22 crc kubenswrapper[4922]: I0930 00:02:22.799615 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:02:22 crc kubenswrapper[4922]: I0930 00:02:22.799949 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" 
containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:02:22 crc kubenswrapper[4922]: I0930 00:02:22.883449 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:02:22 crc kubenswrapper[4922]: I0930 00:02:22.883593 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.74:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:02:28 crc kubenswrapper[4922]: I0930 00:02:28.912547 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:02:28 crc kubenswrapper[4922]: I0930 00:02:28.913293 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.719096 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.719922 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.722989 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.729679 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.771181 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.772010 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.772059 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 00:02:31 crc kubenswrapper[4922]: I0930 00:02:31.778774 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.566360 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.573362 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.774657 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bdcf96fbf-bccqt"] Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.779773 4922 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.789036 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bdcf96fbf-bccqt"] Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.832894 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-nb\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.833181 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-config\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.833202 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsmb2\" (UniqueName: \"kubernetes.io/projected/8cae3ab8-8889-45f3-81a7-489cb4c28c50-kube-api-access-xsmb2\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.833244 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-sb\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.833263 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-dns-svc\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.935502 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-nb\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.935553 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-config\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.935587 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsmb2\" (UniqueName: \"kubernetes.io/projected/8cae3ab8-8889-45f3-81a7-489cb4c28c50-kube-api-access-xsmb2\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.935643 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-sb\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.935668 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-dns-svc\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.936521 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-config\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.936542 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-sb\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.936917 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-dns-svc\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.937235 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-nb\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:32 crc kubenswrapper[4922]: I0930 00:02:32.951947 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsmb2\" (UniqueName: \"kubernetes.io/projected/8cae3ab8-8889-45f3-81a7-489cb4c28c50-kube-api-access-xsmb2\") pod \"dnsmasq-dns-6bdcf96fbf-bccqt\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:33 crc kubenswrapper[4922]: I0930 00:02:33.103416 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:33 crc kubenswrapper[4922]: I0930 00:02:33.696207 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bdcf96fbf-bccqt"] Sep 30 00:02:34 crc kubenswrapper[4922]: I0930 00:02:34.590865 4922 generic.go:334] "Generic (PLEG): container finished" podID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerID="602cb02e2d2377dab3fb807001a742efe33b16dff3ed19212b95a3e0de0d50fc" exitCode=0 Sep 30 00:02:34 crc kubenswrapper[4922]: I0930 00:02:34.590933 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" event={"ID":"8cae3ab8-8889-45f3-81a7-489cb4c28c50","Type":"ContainerDied","Data":"602cb02e2d2377dab3fb807001a742efe33b16dff3ed19212b95a3e0de0d50fc"} Sep 30 00:02:34 crc kubenswrapper[4922]: I0930 00:02:34.591289 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" event={"ID":"8cae3ab8-8889-45f3-81a7-489cb4c28c50","Type":"ContainerStarted","Data":"21ea169e11787c727342d86a21ac7a03d636a61e92371caaa320d5cda71bb3de"} Sep 30 00:02:35 crc kubenswrapper[4922]: I0930 00:02:35.601229 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" event={"ID":"8cae3ab8-8889-45f3-81a7-489cb4c28c50","Type":"ContainerStarted","Data":"db65aad8b0013beb4c6f54a2fc886f8683298f09ad98d7e34556656c25b559b3"} Sep 30 00:02:35 crc kubenswrapper[4922]: I0930 00:02:35.601611 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:35 crc kubenswrapper[4922]: I0930 00:02:35.639310 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" podStartSLOduration=3.639291549 podStartE2EDuration="3.639291549s" podCreationTimestamp="2025-09-30 00:02:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:02:35.626563645 +0000 UTC m=+5759.936852488" watchObservedRunningTime="2025-09-30 00:02:35.639291549 +0000 UTC m=+5759.949580362" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.105624 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.180847 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d79b984cc-pjwcs"] Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.181209 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" podUID="1c84a740-1f70-400d-a56a-ac889071f45e" containerName="dnsmasq-dns" containerID="cri-o://41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191" gracePeriod=10 Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.666207 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.689432 4922 generic.go:334] "Generic (PLEG): container finished" podID="1c84a740-1f70-400d-a56a-ac889071f45e" containerID="41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191" exitCode=0 Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.689674 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" event={"ID":"1c84a740-1f70-400d-a56a-ac889071f45e","Type":"ContainerDied","Data":"41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191"} Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.691061 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" event={"ID":"1c84a740-1f70-400d-a56a-ac889071f45e","Type":"ContainerDied","Data":"14b0db0c8cfaa0a047cf39948678f25605ab352b22dcc440d4ce03b7ab7b9ceb"} Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.691175 4922 scope.go:117] "RemoveContainer" containerID="41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.691450 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d79b984cc-pjwcs" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.723551 4922 scope.go:117] "RemoveContainer" containerID="0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.750904 4922 scope.go:117] "RemoveContainer" containerID="41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191" Sep 30 00:02:43 crc kubenswrapper[4922]: E0930 00:02:43.751420 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191\": container with ID starting with 41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191 not found: ID does not exist" containerID="41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.751450 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191"} err="failed to get container status \"41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191\": rpc error: code = NotFound desc = could not find container \"41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191\": container with ID starting with 41add31cb1b44e6ea9e95a19caaf7a0f76212cfd6f23b56a22603216eafec191 not found: ID does not exist" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.751474 4922 scope.go:117] "RemoveContainer" containerID="0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306" Sep 30 00:02:43 crc kubenswrapper[4922]: E0930 00:02:43.751734 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306\": container with ID starting with 0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306 not found: ID does not exist" containerID="0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.751767 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306"} err="failed to get container status \"0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306\": rpc error: code = NotFound desc = could not find container \"0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306\": container with ID starting with 0d56269610d794ff4cb9b87fdb67d6cc234fa8ac38ac2f3bf1ebf326b04d3306 not found: ID does not exist" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.847484 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-nb\") pod \"1c84a740-1f70-400d-a56a-ac889071f45e\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.847576 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-dns-svc\") pod \"1c84a740-1f70-400d-a56a-ac889071f45e\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.847665 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-config\") pod \"1c84a740-1f70-400d-a56a-ac889071f45e\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.847702 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkbps\" (UniqueName: \"kubernetes.io/projected/1c84a740-1f70-400d-a56a-ac889071f45e-kube-api-access-qkbps\") pod \"1c84a740-1f70-400d-a56a-ac889071f45e\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.847726 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-sb\") pod \"1c84a740-1f70-400d-a56a-ac889071f45e\" (UID: \"1c84a740-1f70-400d-a56a-ac889071f45e\") " Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.856303 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c84a740-1f70-400d-a56a-ac889071f45e-kube-api-access-qkbps" (OuterVolumeSpecName: "kube-api-access-qkbps") pod "1c84a740-1f70-400d-a56a-ac889071f45e" (UID: "1c84a740-1f70-400d-a56a-ac889071f45e"). InnerVolumeSpecName "kube-api-access-qkbps". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.895152 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1c84a740-1f70-400d-a56a-ac889071f45e" (UID: "1c84a740-1f70-400d-a56a-ac889071f45e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.900939 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1c84a740-1f70-400d-a56a-ac889071f45e" (UID: "1c84a740-1f70-400d-a56a-ac889071f45e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.905750 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1c84a740-1f70-400d-a56a-ac889071f45e" (UID: "1c84a740-1f70-400d-a56a-ac889071f45e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.921608 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-config" (OuterVolumeSpecName: "config") pod "1c84a740-1f70-400d-a56a-ac889071f45e" (UID: "1c84a740-1f70-400d-a56a-ac889071f45e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.949351 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.949400 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.949409 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.949419 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkbps\" (UniqueName: \"kubernetes.io/projected/1c84a740-1f70-400d-a56a-ac889071f45e-kube-api-access-qkbps\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:43 crc kubenswrapper[4922]: I0930 00:02:43.949427 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1c84a740-1f70-400d-a56a-ac889071f45e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:44 crc kubenswrapper[4922]: I0930 00:02:44.026179 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d79b984cc-pjwcs"] Sep 30 00:02:44 crc kubenswrapper[4922]: I0930 00:02:44.033980 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d79b984cc-pjwcs"] Sep 30 00:02:44 crc kubenswrapper[4922]: I0930 00:02:44.432083 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c84a740-1f70-400d-a56a-ac889071f45e" path="/var/lib/kubelet/pods/1c84a740-1f70-400d-a56a-ac889071f45e/volumes" Sep 30 00:02:46 crc kubenswrapper[4922]: I0930 00:02:46.874127 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-dmdzw"] Sep 30 00:02:46 crc kubenswrapper[4922]: E0930 00:02:46.874982 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c84a740-1f70-400d-a56a-ac889071f45e" containerName="init" Sep 30 00:02:46 crc kubenswrapper[4922]: I0930 00:02:46.874995 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c84a740-1f70-400d-a56a-ac889071f45e" containerName="init" Sep 30 00:02:46 crc kubenswrapper[4922]: E0930 00:02:46.875008 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c84a740-1f70-400d-a56a-ac889071f45e" containerName="dnsmasq-dns" Sep 30 
00:02:46 crc kubenswrapper[4922]: I0930 00:02:46.875014 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c84a740-1f70-400d-a56a-ac889071f45e" containerName="dnsmasq-dns" Sep 30 00:02:46 crc kubenswrapper[4922]: I0930 00:02:46.875202 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c84a740-1f70-400d-a56a-ac889071f45e" containerName="dnsmasq-dns" Sep 30 00:02:46 crc kubenswrapper[4922]: I0930 00:02:46.875792 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dmdzw" Sep 30 00:02:46 crc kubenswrapper[4922]: I0930 00:02:46.887270 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dmdzw"] Sep 30 00:02:47 crc kubenswrapper[4922]: I0930 00:02:47.007520 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qhdg\" (UniqueName: \"kubernetes.io/projected/b19a4ef3-308d-48fd-9f7d-e932f861145c-kube-api-access-4qhdg\") pod \"cinder-db-create-dmdzw\" (UID: \"b19a4ef3-308d-48fd-9f7d-e932f861145c\") " pod="openstack/cinder-db-create-dmdzw" Sep 30 00:02:47 crc kubenswrapper[4922]: I0930 00:02:47.109013 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qhdg\" (UniqueName: \"kubernetes.io/projected/b19a4ef3-308d-48fd-9f7d-e932f861145c-kube-api-access-4qhdg\") pod \"cinder-db-create-dmdzw\" (UID: \"b19a4ef3-308d-48fd-9f7d-e932f861145c\") " pod="openstack/cinder-db-create-dmdzw" Sep 30 00:02:47 crc kubenswrapper[4922]: I0930 00:02:47.129002 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qhdg\" (UniqueName: \"kubernetes.io/projected/b19a4ef3-308d-48fd-9f7d-e932f861145c-kube-api-access-4qhdg\") pod \"cinder-db-create-dmdzw\" (UID: \"b19a4ef3-308d-48fd-9f7d-e932f861145c\") " pod="openstack/cinder-db-create-dmdzw" Sep 30 00:02:47 crc kubenswrapper[4922]: I0930 00:02:47.196524 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-dmdzw" Sep 30 00:02:47 crc kubenswrapper[4922]: I0930 00:02:47.663787 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-dmdzw"] Sep 30 00:02:47 crc kubenswrapper[4922]: W0930 00:02:47.669126 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb19a4ef3_308d_48fd_9f7d_e932f861145c.slice/crio-eaad885cd366d0e4fd6fa3f0c756eb49c23f4a919c84f00922258691b4baa13a WatchSource:0}: Error finding container eaad885cd366d0e4fd6fa3f0c756eb49c23f4a919c84f00922258691b4baa13a: Status 404 returned error can't find the container with id eaad885cd366d0e4fd6fa3f0c756eb49c23f4a919c84f00922258691b4baa13a Sep 30 00:02:47 crc kubenswrapper[4922]: I0930 00:02:47.733716 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dmdzw" event={"ID":"b19a4ef3-308d-48fd-9f7d-e932f861145c","Type":"ContainerStarted","Data":"eaad885cd366d0e4fd6fa3f0c756eb49c23f4a919c84f00922258691b4baa13a"} Sep 30 00:02:48 crc kubenswrapper[4922]: I0930 00:02:48.758051 4922 generic.go:334] "Generic (PLEG): container finished" podID="b19a4ef3-308d-48fd-9f7d-e932f861145c" containerID="17aefab3d1aa428534857f44817f58d500b24c209055816efcaeaefcdf5a91a1" exitCode=0 Sep 30 00:02:48 crc kubenswrapper[4922]: I0930 00:02:48.758203 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dmdzw" event={"ID":"b19a4ef3-308d-48fd-9f7d-e932f861145c","Type":"ContainerDied","Data":"17aefab3d1aa428534857f44817f58d500b24c209055816efcaeaefcdf5a91a1"} Sep 30 00:02:50 crc kubenswrapper[4922]: I0930 00:02:50.191118 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dmdzw" Sep 30 00:02:50 crc kubenswrapper[4922]: I0930 00:02:50.376812 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qhdg\" (UniqueName: \"kubernetes.io/projected/b19a4ef3-308d-48fd-9f7d-e932f861145c-kube-api-access-4qhdg\") pod \"b19a4ef3-308d-48fd-9f7d-e932f861145c\" (UID: \"b19a4ef3-308d-48fd-9f7d-e932f861145c\") " Sep 30 00:02:50 crc kubenswrapper[4922]: I0930 00:02:50.383624 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b19a4ef3-308d-48fd-9f7d-e932f861145c-kube-api-access-4qhdg" (OuterVolumeSpecName: "kube-api-access-4qhdg") pod "b19a4ef3-308d-48fd-9f7d-e932f861145c" (UID: "b19a4ef3-308d-48fd-9f7d-e932f861145c"). InnerVolumeSpecName "kube-api-access-4qhdg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:02:50 crc kubenswrapper[4922]: I0930 00:02:50.479770 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qhdg\" (UniqueName: \"kubernetes.io/projected/b19a4ef3-308d-48fd-9f7d-e932f861145c-kube-api-access-4qhdg\") on node \"crc\" DevicePath \"\"" Sep 30 00:02:50 crc kubenswrapper[4922]: I0930 00:02:50.776836 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-dmdzw" event={"ID":"b19a4ef3-308d-48fd-9f7d-e932f861145c","Type":"ContainerDied","Data":"eaad885cd366d0e4fd6fa3f0c756eb49c23f4a919c84f00922258691b4baa13a"} Sep 30 00:02:50 crc kubenswrapper[4922]: I0930 00:02:50.777063 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eaad885cd366d0e4fd6fa3f0c756eb49c23f4a919c84f00922258691b4baa13a" Sep 30 00:02:50 crc kubenswrapper[4922]: I0930 00:02:50.776897 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-dmdzw" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.007009 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-1bb4-account-create-whbth"] Sep 30 00:02:57 crc kubenswrapper[4922]: E0930 00:02:57.007935 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b19a4ef3-308d-48fd-9f7d-e932f861145c" containerName="mariadb-database-create" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.007949 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b19a4ef3-308d-48fd-9f7d-e932f861145c" containerName="mariadb-database-create" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.008121 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b19a4ef3-308d-48fd-9f7d-e932f861145c" containerName="mariadb-database-create" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.008805 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1bb4-account-create-whbth" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.011718 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.019288 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1bb4-account-create-whbth"] Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.114453 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tp7p\" (UniqueName: \"kubernetes.io/projected/6d8783f8-224b-447a-92aa-83c7c39a1b09-kube-api-access-9tp7p\") pod \"cinder-1bb4-account-create-whbth\" (UID: \"6d8783f8-224b-447a-92aa-83c7c39a1b09\") " pod="openstack/cinder-1bb4-account-create-whbth" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.216776 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tp7p\" (UniqueName: \"kubernetes.io/projected/6d8783f8-224b-447a-92aa-83c7c39a1b09-kube-api-access-9tp7p\") pod \"cinder-1bb4-account-create-whbth\" (UID: \"6d8783f8-224b-447a-92aa-83c7c39a1b09\") " pod="openstack/cinder-1bb4-account-create-whbth" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.247308 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tp7p\" (UniqueName: \"kubernetes.io/projected/6d8783f8-224b-447a-92aa-83c7c39a1b09-kube-api-access-9tp7p\") pod \"cinder-1bb4-account-create-whbth\" (UID: \"6d8783f8-224b-447a-92aa-83c7c39a1b09\") " pod="openstack/cinder-1bb4-account-create-whbth" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.347090 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1bb4-account-create-whbth" Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.811463 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-1bb4-account-create-whbth"] Sep 30 00:02:57 crc kubenswrapper[4922]: W0930 00:02:57.816447 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d8783f8_224b_447a_92aa_83c7c39a1b09.slice/crio-2cdc82470e861bee2d171bd750d66c7ce4efe245d9394c6ee7c9d30d7ec5f8d5 WatchSource:0}: Error finding container 2cdc82470e861bee2d171bd750d66c7ce4efe245d9394c6ee7c9d30d7ec5f8d5: Status 404 returned error can't find the container with id 2cdc82470e861bee2d171bd750d66c7ce4efe245d9394c6ee7c9d30d7ec5f8d5 Sep 30 00:02:57 crc kubenswrapper[4922]: I0930 00:02:57.862437 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1bb4-account-create-whbth" event={"ID":"6d8783f8-224b-447a-92aa-83c7c39a1b09","Type":"ContainerStarted","Data":"2cdc82470e861bee2d171bd750d66c7ce4efe245d9394c6ee7c9d30d7ec5f8d5"} Sep 30 00:02:58 crc kubenswrapper[4922]: I0930 00:02:58.877121 4922 generic.go:334] "Generic (PLEG): container finished" podID="6d8783f8-224b-447a-92aa-83c7c39a1b09" containerID="6323babe2626d6ebaea5764df276119afcfe204083fd03ff0abd4c5e21ad4eda" exitCode=0 Sep 30 00:02:58 crc kubenswrapper[4922]: I0930 00:02:58.877469 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1bb4-account-create-whbth" event={"ID":"6d8783f8-224b-447a-92aa-83c7c39a1b09","Type":"ContainerDied","Data":"6323babe2626d6ebaea5764df276119afcfe204083fd03ff0abd4c5e21ad4eda"} Sep 30 00:02:58 crc kubenswrapper[4922]: I0930 00:02:58.913555 4922 patch_prober.go:28] interesting 
pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:02:58 crc kubenswrapper[4922]: I0930 00:02:58.913658 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:02:58 crc kubenswrapper[4922]: I0930 00:02:58.913735 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:02:58 crc kubenswrapper[4922]: I0930 00:02:58.914768 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"86380e93f24d31c94f945d418afe09f1011e2f6445c628fb457b28215153e610"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:02:58 crc kubenswrapper[4922]: I0930 00:02:58.914877 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://86380e93f24d31c94f945d418afe09f1011e2f6445c628fb457b28215153e610" gracePeriod=600 Sep 30 00:02:59 crc kubenswrapper[4922]: I0930 00:02:59.906108 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="86380e93f24d31c94f945d418afe09f1011e2f6445c628fb457b28215153e610" exitCode=0 Sep 30 00:02:59 crc kubenswrapper[4922]: I0930 00:02:59.906760 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"86380e93f24d31c94f945d418afe09f1011e2f6445c628fb457b28215153e610"} Sep 30 00:02:59 crc kubenswrapper[4922]: I0930 00:02:59.907437 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4"} Sep 30 00:02:59 crc kubenswrapper[4922]: I0930 00:02:59.907480 4922 scope.go:117] "RemoveContainer" containerID="5050e8161e2c40dfe15569148acca65ce4b4b0e0ea85fc19d4b92d1d85f759c7" Sep 30 00:03:00 crc kubenswrapper[4922]: I0930 00:03:00.325362 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-1bb4-account-create-whbth" Sep 30 00:03:00 crc kubenswrapper[4922]: I0930 00:03:00.486333 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tp7p\" (UniqueName: \"kubernetes.io/projected/6d8783f8-224b-447a-92aa-83c7c39a1b09-kube-api-access-9tp7p\") pod \"6d8783f8-224b-447a-92aa-83c7c39a1b09\" (UID: \"6d8783f8-224b-447a-92aa-83c7c39a1b09\") " Sep 30 00:03:00 crc kubenswrapper[4922]: I0930 00:03:00.499766 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d8783f8-224b-447a-92aa-83c7c39a1b09-kube-api-access-9tp7p" (OuterVolumeSpecName: "kube-api-access-9tp7p") pod "6d8783f8-224b-447a-92aa-83c7c39a1b09" (UID: "6d8783f8-224b-447a-92aa-83c7c39a1b09"). InnerVolumeSpecName "kube-api-access-9tp7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:00 crc kubenswrapper[4922]: I0930 00:03:00.588653 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tp7p\" (UniqueName: \"kubernetes.io/projected/6d8783f8-224b-447a-92aa-83c7c39a1b09-kube-api-access-9tp7p\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:00 crc kubenswrapper[4922]: I0930 00:03:00.919969 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-1bb4-account-create-whbth" event={"ID":"6d8783f8-224b-447a-92aa-83c7c39a1b09","Type":"ContainerDied","Data":"2cdc82470e861bee2d171bd750d66c7ce4efe245d9394c6ee7c9d30d7ec5f8d5"} Sep 30 00:03:00 crc kubenswrapper[4922]: I0930 00:03:00.920019 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cdc82470e861bee2d171bd750d66c7ce4efe245d9394c6ee7c9d30d7ec5f8d5" Sep 30 00:03:00 crc kubenswrapper[4922]: I0930 00:03:00.920106 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-1bb4-account-create-whbth" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.314179 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-sp7xk"] Sep 30 00:03:02 crc kubenswrapper[4922]: E0930 00:03:02.316375 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d8783f8-224b-447a-92aa-83c7c39a1b09" containerName="mariadb-account-create" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.316406 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d8783f8-224b-447a-92aa-83c7c39a1b09" containerName="mariadb-account-create" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.316619 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d8783f8-224b-447a-92aa-83c7c39a1b09" containerName="mariadb-account-create" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.317366 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.319723 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.320976 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-nzndc" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.322684 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.326921 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sp7xk"] Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.423048 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89xjp\" (UniqueName: \"kubernetes.io/projected/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-kube-api-access-89xjp\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.423190 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-combined-ca-bundle\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.423224 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-db-sync-config-data\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.423248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-scripts\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.423424 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-etc-machine-id\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.423473 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-config-data\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.525800 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-etc-machine-id\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.526057 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-config-data\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.526210 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-etc-machine-id\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.527885 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89xjp\" (UniqueName: \"kubernetes.io/projected/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-kube-api-access-89xjp\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.527992 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-combined-ca-bundle\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.528021 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-db-sync-config-data\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.528045 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-scripts\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.536103 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-config-data\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.538801 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-scripts\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.540634 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-combined-ca-bundle\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.541603 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-db-sync-config-data\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " 
pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.555496 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89xjp\" (UniqueName: \"kubernetes.io/projected/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-kube-api-access-89xjp\") pod \"cinder-db-sync-sp7xk\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:02 crc kubenswrapper[4922]: I0930 00:03:02.658688 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:03 crc kubenswrapper[4922]: W0930 00:03:03.166922 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeaf8102b_90d7_4cb7_a1ff_d49dc6e6ab83.slice/crio-deecad2ad34c312fa925fa8afeff3ad932043e2c5bbd84fe0674a8fc03a9a52a WatchSource:0}: Error finding container deecad2ad34c312fa925fa8afeff3ad932043e2c5bbd84fe0674a8fc03a9a52a: Status 404 returned error can't find the container with id deecad2ad34c312fa925fa8afeff3ad932043e2c5bbd84fe0674a8fc03a9a52a Sep 30 00:03:03 crc kubenswrapper[4922]: I0930 00:03:03.176576 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sp7xk"] Sep 30 00:03:03 crc kubenswrapper[4922]: I0930 00:03:03.972093 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp7xk" event={"ID":"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83","Type":"ContainerStarted","Data":"e793d3dc7512e712bfa88fa021bf134db6fcb7feaba47be953447a5d7f3153aa"} Sep 30 00:03:03 crc kubenswrapper[4922]: I0930 00:03:03.973028 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp7xk" event={"ID":"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83","Type":"ContainerStarted","Data":"deecad2ad34c312fa925fa8afeff3ad932043e2c5bbd84fe0674a8fc03a9a52a"} Sep 30 00:03:03 crc kubenswrapper[4922]: I0930 00:03:03.992873 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-sp7xk" podStartSLOduration=1.9928490509999999 podStartE2EDuration="1.992849051s" podCreationTimestamp="2025-09-30 00:03:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:03.990300118 +0000 UTC m=+5788.300588991" watchObservedRunningTime="2025-09-30 00:03:03.992849051 +0000 UTC m=+5788.303137894" Sep 30 00:03:07 crc kubenswrapper[4922]: I0930 00:03:07.008761 4922 generic.go:334] "Generic (PLEG): container finished" podID="eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" containerID="e793d3dc7512e712bfa88fa021bf134db6fcb7feaba47be953447a5d7f3153aa" exitCode=0 Sep 30 00:03:07 crc kubenswrapper[4922]: I0930 00:03:07.008850 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp7xk" event={"ID":"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83","Type":"ContainerDied","Data":"e793d3dc7512e712bfa88fa021bf134db6fcb7feaba47be953447a5d7f3153aa"} Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.456353 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.551883 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-etc-machine-id\") pod \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.551940 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" (UID: "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.552016 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-config-data\") pod \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.552082 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-scripts\") pod \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.552275 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-db-sync-config-data\") pod \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.553191 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89xjp\" (UniqueName: \"kubernetes.io/projected/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-kube-api-access-89xjp\") pod \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.553256 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-combined-ca-bundle\") pod \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\" (UID: \"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83\") " Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.554006 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.558795 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-scripts" (OuterVolumeSpecName: "scripts") pod "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" (UID: "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.559588 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-kube-api-access-89xjp" (OuterVolumeSpecName: "kube-api-access-89xjp") pod "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" (UID: "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83"). InnerVolumeSpecName "kube-api-access-89xjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.564718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" (UID: "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.586534 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" (UID: "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.619134 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-config-data" (OuterVolumeSpecName: "config-data") pod "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" (UID: "eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.656057 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.656096 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.656109 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.656120 4922 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:08 crc kubenswrapper[4922]: I0930 00:03:08.656131 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89xjp\" (UniqueName: \"kubernetes.io/projected/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83-kube-api-access-89xjp\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.034817 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp7xk" event={"ID":"eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83","Type":"ContainerDied","Data":"deecad2ad34c312fa925fa8afeff3ad932043e2c5bbd84fe0674a8fc03a9a52a"} Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.035257 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="deecad2ad34c312fa925fa8afeff3ad932043e2c5bbd84fe0674a8fc03a9a52a" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.035021 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sp7xk" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.392686 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-564f6b675c-t4h5f"] Sep 30 00:03:09 crc kubenswrapper[4922]: E0930 00:03:09.393148 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" containerName="cinder-db-sync" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.393169 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" containerName="cinder-db-sync" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.393450 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" containerName="cinder-db-sync" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.394617 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.407408 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-564f6b675c-t4h5f"] Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.483091 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbnxk\" (UniqueName: \"kubernetes.io/projected/f229dc10-13bc-4dba-bf6e-0889db6cf260-kube-api-access-bbnxk\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.483167 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-config\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.483298 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-sb\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.483448 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-nb\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.483483 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-dns-svc\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.499221 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.501158 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.503482 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.503790 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-nzndc" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.503925 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.504026 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.519594 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585117 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-scripts\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585167 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data-custom\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585191 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585314 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67kks\" (UniqueName: \"kubernetes.io/projected/88daf340-9131-4476-9b5e-7a070d7c7a82-kube-api-access-67kks\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585375 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-nb\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585423 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-dns-svc\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585442 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbnxk\" (UniqueName: \"kubernetes.io/projected/f229dc10-13bc-4dba-bf6e-0889db6cf260-kube-api-access-bbnxk\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 
00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585475 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-config\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585506 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88daf340-9131-4476-9b5e-7a070d7c7a82-logs\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585531 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585581 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/88daf340-9131-4476-9b5e-7a070d7c7a82-etc-machine-id\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.585603 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-sb\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.586418 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-dns-svc\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.587161 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-config\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.587506 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-nb\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.587724 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-sb\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.629306 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbnxk\" (UniqueName: 
\"kubernetes.io/projected/f229dc10-13bc-4dba-bf6e-0889db6cf260-kube-api-access-bbnxk\") pod \"dnsmasq-dns-564f6b675c-t4h5f\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686547 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-scripts\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686589 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data-custom\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686608 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686644 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67kks\" (UniqueName: \"kubernetes.io/projected/88daf340-9131-4476-9b5e-7a070d7c7a82-kube-api-access-67kks\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686695 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88daf340-9131-4476-9b5e-7a070d7c7a82-logs\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686717 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686760 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/88daf340-9131-4476-9b5e-7a070d7c7a82-etc-machine-id\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.686865 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/88daf340-9131-4476-9b5e-7a070d7c7a82-etc-machine-id\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.687916 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88daf340-9131-4476-9b5e-7a070d7c7a82-logs\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.692609 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-scripts\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.694198 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.701512 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data-custom\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.703187 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.704838 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67kks\" (UniqueName: \"kubernetes.io/projected/88daf340-9131-4476-9b5e-7a070d7c7a82-kube-api-access-67kks\") pod \"cinder-api-0\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " pod="openstack/cinder-api-0" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.722662 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:09 crc kubenswrapper[4922]: I0930 00:03:09.821554 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:03:10 crc kubenswrapper[4922]: W0930 00:03:10.190895 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf229dc10_13bc_4dba_bf6e_0889db6cf260.slice/crio-7f8efa7866aa2f5c0c50e18158a38ecb525d6de75e0b025f5be5e63eb5e4e688 WatchSource:0}: Error finding container 7f8efa7866aa2f5c0c50e18158a38ecb525d6de75e0b025f5be5e63eb5e4e688: Status 404 returned error can't find the container with id 7f8efa7866aa2f5c0c50e18158a38ecb525d6de75e0b025f5be5e63eb5e4e688 Sep 30 00:03:10 crc kubenswrapper[4922]: I0930 00:03:10.191641 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-564f6b675c-t4h5f"] Sep 30 00:03:10 crc kubenswrapper[4922]: I0930 00:03:10.320431 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:10 crc kubenswrapper[4922]: W0930 00:03:10.337067 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88daf340_9131_4476_9b5e_7a070d7c7a82.slice/crio-9bdab6ca708940e1a0b51d9d3f69fb7caa9698d81fae9b291e35dd8f4bc5bfa0 WatchSource:0}: Error finding container 9bdab6ca708940e1a0b51d9d3f69fb7caa9698d81fae9b291e35dd8f4bc5bfa0: Status 404 returned error can't find the container with id 9bdab6ca708940e1a0b51d9d3f69fb7caa9698d81fae9b291e35dd8f4bc5bfa0 Sep 30 00:03:11 crc kubenswrapper[4922]: I0930 00:03:11.055918 4922 generic.go:334] "Generic (PLEG): container finished" podID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerID="70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139" exitCode=0 Sep 30 00:03:11 crc kubenswrapper[4922]: I0930 00:03:11.056059 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" event={"ID":"f229dc10-13bc-4dba-bf6e-0889db6cf260","Type":"ContainerDied","Data":"70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139"} Sep 30 00:03:11 crc kubenswrapper[4922]: I0930 00:03:11.056126 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" event={"ID":"f229dc10-13bc-4dba-bf6e-0889db6cf260","Type":"ContainerStarted","Data":"7f8efa7866aa2f5c0c50e18158a38ecb525d6de75e0b025f5be5e63eb5e4e688"} Sep 30 00:03:11 crc kubenswrapper[4922]: I0930 00:03:11.060100 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"88daf340-9131-4476-9b5e-7a070d7c7a82","Type":"ContainerStarted","Data":"f8ac858019877d34a1536d81c1e6549821bb23a4ef10e1d8a247ee2e5d25f3e2"} Sep 30 00:03:11 crc kubenswrapper[4922]: I0930 00:03:11.060158 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"88daf340-9131-4476-9b5e-7a070d7c7a82","Type":"ContainerStarted","Data":"9bdab6ca708940e1a0b51d9d3f69fb7caa9698d81fae9b291e35dd8f4bc5bfa0"} Sep 30 00:03:12 crc kubenswrapper[4922]: I0930 00:03:12.072560 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" event={"ID":"f229dc10-13bc-4dba-bf6e-0889db6cf260","Type":"ContainerStarted","Data":"b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d"} Sep 30 00:03:12 crc kubenswrapper[4922]: I0930 00:03:12.072916 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:12 crc kubenswrapper[4922]: I0930 00:03:12.076717 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"88daf340-9131-4476-9b5e-7a070d7c7a82","Type":"ContainerStarted","Data":"e18bad55480bd38b4b32b8d27428b1eb1405b32af92c736675bd7d9d4405d4ff"} Sep 30 00:03:12 crc kubenswrapper[4922]: I0930 00:03:12.076869 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 00:03:12 crc kubenswrapper[4922]: I0930 00:03:12.096295 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" podStartSLOduration=3.096277739 podStartE2EDuration="3.096277739s" podCreationTimestamp="2025-09-30 00:03:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:12.089270416 +0000 UTC m=+5796.399559229" watchObservedRunningTime="2025-09-30 00:03:12.096277739 +0000 UTC m=+5796.406566552" Sep 30 00:03:12 crc kubenswrapper[4922]: I0930 00:03:12.128466 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.128444592 podStartE2EDuration="3.128444592s" podCreationTimestamp="2025-09-30 00:03:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:12.116741194 +0000 UTC m=+5796.427030017" watchObservedRunningTime="2025-09-30 00:03:12.128444592 +0000 UTC m=+5796.438733405" Sep 30 00:03:19 crc kubenswrapper[4922]: I0930 00:03:19.724612 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:03:19 crc kubenswrapper[4922]: I0930 00:03:19.791836 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bdcf96fbf-bccqt"] Sep 30 00:03:19 crc kubenswrapper[4922]: I0930 00:03:19.792312 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" podUID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerName="dnsmasq-dns" containerID="cri-o://db65aad8b0013beb4c6f54a2fc886f8683298f09ad98d7e34556656c25b559b3" gracePeriod=10 Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.164280 4922 generic.go:334] "Generic (PLEG): container finished" podID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerID="db65aad8b0013beb4c6f54a2fc886f8683298f09ad98d7e34556656c25b559b3" exitCode=0 Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.164323 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" event={"ID":"8cae3ab8-8889-45f3-81a7-489cb4c28c50","Type":"ContainerDied","Data":"db65aad8b0013beb4c6f54a2fc886f8683298f09ad98d7e34556656c25b559b3"} Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.263014 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.397100 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-dns-svc\") pod \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.397173 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-sb\") pod \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.397360 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-nb\") pod \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.397465 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-config\") pod \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.397487 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsmb2\" (UniqueName: \"kubernetes.io/projected/8cae3ab8-8889-45f3-81a7-489cb4c28c50-kube-api-access-xsmb2\") pod \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\" (UID: \"8cae3ab8-8889-45f3-81a7-489cb4c28c50\") " Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.409746 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cae3ab8-8889-45f3-81a7-489cb4c28c50-kube-api-access-xsmb2" (OuterVolumeSpecName: "kube-api-access-xsmb2") pod "8cae3ab8-8889-45f3-81a7-489cb4c28c50" (UID: "8cae3ab8-8889-45f3-81a7-489cb4c28c50"). InnerVolumeSpecName "kube-api-access-xsmb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.477394 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-config" (OuterVolumeSpecName: "config") pod "8cae3ab8-8889-45f3-81a7-489cb4c28c50" (UID: "8cae3ab8-8889-45f3-81a7-489cb4c28c50"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.484157 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8cae3ab8-8889-45f3-81a7-489cb4c28c50" (UID: "8cae3ab8-8889-45f3-81a7-489cb4c28c50"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.496911 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8cae3ab8-8889-45f3-81a7-489cb4c28c50" (UID: "8cae3ab8-8889-45f3-81a7-489cb4c28c50"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.501180 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8cae3ab8-8889-45f3-81a7-489cb4c28c50" (UID: "8cae3ab8-8889-45f3-81a7-489cb4c28c50"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.501647 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.501676 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.501686 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsmb2\" (UniqueName: \"kubernetes.io/projected/8cae3ab8-8889-45f3-81a7-489cb4c28c50-kube-api-access-xsmb2\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.501696 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:20 crc kubenswrapper[4922]: I0930 00:03:20.501703 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8cae3ab8-8889-45f3-81a7-489cb4c28c50-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.185948 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" event={"ID":"8cae3ab8-8889-45f3-81a7-489cb4c28c50","Type":"ContainerDied","Data":"21ea169e11787c727342d86a21ac7a03d636a61e92371caaa320d5cda71bb3de"} Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.186026 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bdcf96fbf-bccqt" Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.186056 4922 scope.go:117] "RemoveContainer" containerID="db65aad8b0013beb4c6f54a2fc886f8683298f09ad98d7e34556656c25b559b3" Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.219004 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bdcf96fbf-bccqt"] Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.226836 4922 scope.go:117] "RemoveContainer" containerID="602cb02e2d2377dab3fb807001a742efe33b16dff3ed19212b95a3e0de0d50fc" Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.227460 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bdcf96fbf-bccqt"] Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.397454 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.397988 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4182d7b7-7987-4b76-8325-f312ed9dff54" containerName="nova-scheduler-scheduler" containerID="cri-o://bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5" gracePeriod=30 Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.410778 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.411053 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-log" containerID="cri-o://2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa" gracePeriod=30 Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.411156 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-metadata" containerID="cri-o://fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e" gracePeriod=30 Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.430551 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.431121 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="21a2e46f-ece9-4faa-bb2e-d040c4501c1d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://cc9707d0d49c0441fa34d40bf19b9503f73b99d595caf2f4c6458d520dd7ea36" gracePeriod=30 Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.445518 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.445851 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="832e746b-286e-4134-8e71-448ee79cf1c5" containerName="nova-cell0-conductor-conductor" containerID="cri-o://df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe" gracePeriod=30 Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.461169 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.461727 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" 
containerName="nova-api-log" containerID="cri-o://5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5" gracePeriod=30 Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.461971 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-api" containerID="cri-o://8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed" gracePeriod=30 Sep 30 00:03:21 crc kubenswrapper[4922]: I0930 00:03:21.859175 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.211180 4922 generic.go:334] "Generic (PLEG): container finished" podID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerID="2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa" exitCode=143 Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.211250 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eeaba508-3582-4908-9e9c-3dbd53460fe0","Type":"ContainerDied","Data":"2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa"} Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.213513 4922 generic.go:334] "Generic (PLEG): container finished" podID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerID="5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5" exitCode=143 Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.213579 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3610ce4-5db9-417f-9998-f1fa664fcfa8","Type":"ContainerDied","Data":"5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5"} Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.215032 4922 generic.go:334] "Generic (PLEG): container finished" podID="21a2e46f-ece9-4faa-bb2e-d040c4501c1d" containerID="cc9707d0d49c0441fa34d40bf19b9503f73b99d595caf2f4c6458d520dd7ea36" exitCode=0 Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.215058 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"21a2e46f-ece9-4faa-bb2e-d040c4501c1d","Type":"ContainerDied","Data":"cc9707d0d49c0441fa34d40bf19b9503f73b99d595caf2f4c6458d520dd7ea36"} Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.305184 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.432587 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-combined-ca-bundle\") pod \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.432634 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-config-data\") pod \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.432818 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wslhq\" (UniqueName: \"kubernetes.io/projected/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-kube-api-access-wslhq\") pod \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\" (UID: \"21a2e46f-ece9-4faa-bb2e-d040c4501c1d\") " Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.456341 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" path="/var/lib/kubelet/pods/8cae3ab8-8889-45f3-81a7-489cb4c28c50/volumes" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.457820 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-kube-api-access-wslhq" (OuterVolumeSpecName: "kube-api-access-wslhq") pod "21a2e46f-ece9-4faa-bb2e-d040c4501c1d" (UID: "21a2e46f-ece9-4faa-bb2e-d040c4501c1d"). InnerVolumeSpecName "kube-api-access-wslhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.512323 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-config-data" (OuterVolumeSpecName: "config-data") pod "21a2e46f-ece9-4faa-bb2e-d040c4501c1d" (UID: "21a2e46f-ece9-4faa-bb2e-d040c4501c1d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.520007 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21a2e46f-ece9-4faa-bb2e-d040c4501c1d" (UID: "21a2e46f-ece9-4faa-bb2e-d040c4501c1d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.535530 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wslhq\" (UniqueName: \"kubernetes.io/projected/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-kube-api-access-wslhq\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.535567 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:22 crc kubenswrapper[4922]: I0930 00:03:22.535582 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21a2e46f-ece9-4faa-bb2e-d040c4501c1d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.225798 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"21a2e46f-ece9-4faa-bb2e-d040c4501c1d","Type":"ContainerDied","Data":"8946e0e4569b546e0fbc29faecf043a10d9fd5558293900510d9f5106bcfe77f"} Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.225875 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.226744 4922 scope.go:117] "RemoveContainer" containerID="cc9707d0d49c0441fa34d40bf19b9503f73b99d595caf2f4c6458d520dd7ea36" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.285427 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.307540 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.316098 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:03:23 crc kubenswrapper[4922]: E0930 00:03:23.316773 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerName="init" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.316796 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerName="init" Sep 30 00:03:23 crc kubenswrapper[4922]: E0930 00:03:23.316818 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerName="dnsmasq-dns" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.316828 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerName="dnsmasq-dns" Sep 30 00:03:23 crc kubenswrapper[4922]: E0930 00:03:23.316889 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21a2e46f-ece9-4faa-bb2e-d040c4501c1d" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.316898 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="21a2e46f-ece9-4faa-bb2e-d040c4501c1d" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.317155 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="21a2e46f-ece9-4faa-bb2e-d040c4501c1d" containerName="nova-cell1-novncproxy-novncproxy" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.317206 4922 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="8cae3ab8-8889-45f3-81a7-489cb4c28c50" containerName="dnsmasq-dns" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.317996 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.323015 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.327798 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.354997 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jch9z\" (UniqueName: \"kubernetes.io/projected/87a8e261-f388-43d6-b0c3-70694d68aa54-kube-api-access-jch9z\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.355085 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a8e261-f388-43d6-b0c3-70694d68aa54-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.355152 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87a8e261-f388-43d6-b0c3-70694d68aa54-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.459792 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87a8e261-f388-43d6-b0c3-70694d68aa54-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.460027 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jch9z\" (UniqueName: \"kubernetes.io/projected/87a8e261-f388-43d6-b0c3-70694d68aa54-kube-api-access-jch9z\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.460110 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a8e261-f388-43d6-b0c3-70694d68aa54-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.478094 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87a8e261-f388-43d6-b0c3-70694d68aa54-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.478295 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/87a8e261-f388-43d6-b0c3-70694d68aa54-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.482079 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jch9z\" (UniqueName: \"kubernetes.io/projected/87a8e261-f388-43d6-b0c3-70694d68aa54-kube-api-access-jch9z\") pod \"nova-cell1-novncproxy-0\" (UID: \"87a8e261-f388-43d6-b0c3-70694d68aa54\") " pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.647005 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.828038 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.867086 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ql7q\" (UniqueName: \"kubernetes.io/projected/4182d7b7-7987-4b76-8325-f312ed9dff54-kube-api-access-5ql7q\") pod \"4182d7b7-7987-4b76-8325-f312ed9dff54\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.867278 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-combined-ca-bundle\") pod \"4182d7b7-7987-4b76-8325-f312ed9dff54\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.867390 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-config-data\") pod \"4182d7b7-7987-4b76-8325-f312ed9dff54\" (UID: \"4182d7b7-7987-4b76-8325-f312ed9dff54\") " Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.872333 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4182d7b7-7987-4b76-8325-f312ed9dff54-kube-api-access-5ql7q" (OuterVolumeSpecName: "kube-api-access-5ql7q") pod "4182d7b7-7987-4b76-8325-f312ed9dff54" (UID: "4182d7b7-7987-4b76-8325-f312ed9dff54"). InnerVolumeSpecName "kube-api-access-5ql7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.896840 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4182d7b7-7987-4b76-8325-f312ed9dff54" (UID: "4182d7b7-7987-4b76-8325-f312ed9dff54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.899853 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-config-data" (OuterVolumeSpecName: "config-data") pod "4182d7b7-7987-4b76-8325-f312ed9dff54" (UID: "4182d7b7-7987-4b76-8325-f312ed9dff54"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.969152 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ql7q\" (UniqueName: \"kubernetes.io/projected/4182d7b7-7987-4b76-8325-f312ed9dff54-kube-api-access-5ql7q\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.969413 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:23 crc kubenswrapper[4922]: I0930 00:03:23.969424 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4182d7b7-7987-4b76-8325-f312ed9dff54-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.102898 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.140129 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: W0930 00:03:24.140531 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87a8e261_f388_43d6_b0c3_70694d68aa54.slice/crio-11843978bcc0e1eca77bd9f797f92393b7050f6d2e5d49fb1813a38a35888d11 WatchSource:0}: Error finding container 11843978bcc0e1eca77bd9f797f92393b7050f6d2e5d49fb1813a38a35888d11: Status 404 returned error can't find the container with id 11843978bcc0e1eca77bd9f797f92393b7050f6d2e5d49fb1813a38a35888d11 Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.176755 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-combined-ca-bundle\") pod \"832e746b-286e-4134-8e71-448ee79cf1c5\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.177238 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-config-data\") pod \"832e746b-286e-4134-8e71-448ee79cf1c5\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.177453 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9b5vt\" (UniqueName: \"kubernetes.io/projected/832e746b-286e-4134-8e71-448ee79cf1c5-kube-api-access-9b5vt\") pod \"832e746b-286e-4134-8e71-448ee79cf1c5\" (UID: \"832e746b-286e-4134-8e71-448ee79cf1c5\") " Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.183610 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/832e746b-286e-4134-8e71-448ee79cf1c5-kube-api-access-9b5vt" (OuterVolumeSpecName: "kube-api-access-9b5vt") pod "832e746b-286e-4134-8e71-448ee79cf1c5" (UID: "832e746b-286e-4134-8e71-448ee79cf1c5"). InnerVolumeSpecName "kube-api-access-9b5vt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.206363 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "832e746b-286e-4134-8e71-448ee79cf1c5" (UID: "832e746b-286e-4134-8e71-448ee79cf1c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.221752 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-config-data" (OuterVolumeSpecName: "config-data") pod "832e746b-286e-4134-8e71-448ee79cf1c5" (UID: "832e746b-286e-4134-8e71-448ee79cf1c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.242237 4922 generic.go:334] "Generic (PLEG): container finished" podID="832e746b-286e-4134-8e71-448ee79cf1c5" containerID="df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe" exitCode=0 Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.242311 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"832e746b-286e-4134-8e71-448ee79cf1c5","Type":"ContainerDied","Data":"df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe"} Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.242343 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"832e746b-286e-4134-8e71-448ee79cf1c5","Type":"ContainerDied","Data":"d931a2666c8efe798983819c008288cf0164449884f186b5e53c2ddf3e68e8a8"} Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.242363 4922 scope.go:117] "RemoveContainer" containerID="df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.242516 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.246044 4922 generic.go:334] "Generic (PLEG): container finished" podID="4182d7b7-7987-4b76-8325-f312ed9dff54" containerID="bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5" exitCode=0 Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.246099 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4182d7b7-7987-4b76-8325-f312ed9dff54","Type":"ContainerDied","Data":"bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5"} Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.246104 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.246124 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4182d7b7-7987-4b76-8325-f312ed9dff54","Type":"ContainerDied","Data":"4a6414f400761c9b6a3e26f977fda787c0d15dd2a6157e1418c1b8165387d76b"} Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.248244 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"87a8e261-f388-43d6-b0c3-70694d68aa54","Type":"ContainerStarted","Data":"11843978bcc0e1eca77bd9f797f92393b7050f6d2e5d49fb1813a38a35888d11"} Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.268679 4922 scope.go:117] "RemoveContainer" containerID="df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe" Sep 30 00:03:24 crc kubenswrapper[4922]: E0930 00:03:24.269338 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe\": container with ID starting with df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe not found: ID does not exist" containerID="df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.269502 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe"} err="failed to get container status \"df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe\": rpc error: code = NotFound desc = could not find container \"df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe\": container with ID starting with df30128c79e348430b68457911aec33f4ff411faf5e9da3785512d8bcb338afe not found: ID does not exist" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.269609 4922 scope.go:117] "RemoveContainer" containerID="bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.282474 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.282506 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/832e746b-286e-4134-8e71-448ee79cf1c5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.282516 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9b5vt\" (UniqueName: \"kubernetes.io/projected/832e746b-286e-4134-8e71-448ee79cf1c5-kube-api-access-9b5vt\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.282616 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.301242 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.321237 4922 scope.go:117] "RemoveContainer" containerID="bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5" Sep 30 00:03:24 crc kubenswrapper[4922]: E0930 00:03:24.321662 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc 
= could not find container \"bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5\": container with ID starting with bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5 not found: ID does not exist" containerID="bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.321699 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5"} err="failed to get container status \"bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5\": rpc error: code = NotFound desc = could not find container \"bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5\": container with ID starting with bb6ef069e7d64486fe6d7386b12b127a220b2f721c2a4589fa1d9788f8bdf0e5 not found: ID does not exist" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.330829 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.340226 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.347499 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: E0930 00:03:24.347967 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4182d7b7-7987-4b76-8325-f312ed9dff54" containerName="nova-scheduler-scheduler" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.347988 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4182d7b7-7987-4b76-8325-f312ed9dff54" containerName="nova-scheduler-scheduler" Sep 30 00:03:24 crc kubenswrapper[4922]: E0930 00:03:24.348025 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="832e746b-286e-4134-8e71-448ee79cf1c5" containerName="nova-cell0-conductor-conductor" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.348034 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="832e746b-286e-4134-8e71-448ee79cf1c5" containerName="nova-cell0-conductor-conductor" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.348214 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4182d7b7-7987-4b76-8325-f312ed9dff54" containerName="nova-scheduler-scheduler" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.348234 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="832e746b-286e-4134-8e71-448ee79cf1c5" containerName="nova-cell0-conductor-conductor" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.348958 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.352264 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.355613 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.356944 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.359020 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.366001 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.379440 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.433577 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21a2e46f-ece9-4faa-bb2e-d040c4501c1d" path="/var/lib/kubelet/pods/21a2e46f-ece9-4faa-bb2e-d040c4501c1d/volumes" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.434501 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4182d7b7-7987-4b76-8325-f312ed9dff54" path="/var/lib/kubelet/pods/4182d7b7-7987-4b76-8325-f312ed9dff54/volumes" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.435203 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="832e746b-286e-4134-8e71-448ee79cf1c5" path="/var/lib/kubelet/pods/832e746b-286e-4134-8e71-448ee79cf1c5/volumes" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.485076 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.485478 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.485501 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.485537 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfh2l\" (UniqueName: \"kubernetes.io/projected/ab906156-68b3-4477-aff3-05cba9fe664f-kube-api-access-jfh2l\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.485565 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxl6n\" (UniqueName: \"kubernetes.io/projected/fe2075ef-27e4-4e92-84d3-4178fa974985-kube-api-access-gxl6n\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.485615 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-config-data\") pod 
\"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.573060 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": read tcp 10.217.0.2:58964->10.217.1.73:8775: read: connection reset by peer" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.573102 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.73:8775/\": read tcp 10.217.0.2:58974->10.217.1.73:8775: read: connection reset by peer" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.586940 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.587010 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.587043 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.587095 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfh2l\" (UniqueName: \"kubernetes.io/projected/ab906156-68b3-4477-aff3-05cba9fe664f-kube-api-access-jfh2l\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.587142 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxl6n\" (UniqueName: \"kubernetes.io/projected/fe2075ef-27e4-4e92-84d3-4178fa974985-kube-api-access-gxl6n\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.587221 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-config-data\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.592381 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.592386 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.598167 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-config-data\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.608328 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.609331 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfh2l\" (UniqueName: \"kubernetes.io/projected/ab906156-68b3-4477-aff3-05cba9fe664f-kube-api-access-jfh2l\") pod \"nova-cell0-conductor-0\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.610833 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxl6n\" (UniqueName: \"kubernetes.io/projected/fe2075ef-27e4-4e92-84d3-4178fa974985-kube-api-access-gxl6n\") pod \"nova-scheduler-0\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.680712 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.698955 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.723181 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.723510 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" containerName="nova-cell1-conductor-conductor" containerID="cri-o://83da8fb26fc5825bbd39d469e693b4ee3c3a1b5fbd7a3d1e7f4890a3246d0179" gracePeriod=30 Sep 30 00:03:24 crc kubenswrapper[4922]: I0930 00:03:24.965475 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.112548 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdmvm\" (UniqueName: \"kubernetes.io/projected/eeaba508-3582-4908-9e9c-3dbd53460fe0-kube-api-access-cdmvm\") pod \"eeaba508-3582-4908-9e9c-3dbd53460fe0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.112642 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeaba508-3582-4908-9e9c-3dbd53460fe0-logs\") pod \"eeaba508-3582-4908-9e9c-3dbd53460fe0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.112749 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-combined-ca-bundle\") pod \"eeaba508-3582-4908-9e9c-3dbd53460fe0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.112784 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-config-data\") pod \"eeaba508-3582-4908-9e9c-3dbd53460fe0\" (UID: \"eeaba508-3582-4908-9e9c-3dbd53460fe0\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.114054 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eeaba508-3582-4908-9e9c-3dbd53460fe0-logs" (OuterVolumeSpecName: "logs") pod "eeaba508-3582-4908-9e9c-3dbd53460fe0" (UID: "eeaba508-3582-4908-9e9c-3dbd53460fe0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.135635 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeaba508-3582-4908-9e9c-3dbd53460fe0-kube-api-access-cdmvm" (OuterVolumeSpecName: "kube-api-access-cdmvm") pod "eeaba508-3582-4908-9e9c-3dbd53460fe0" (UID: "eeaba508-3582-4908-9e9c-3dbd53460fe0"). InnerVolumeSpecName "kube-api-access-cdmvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.150271 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-config-data" (OuterVolumeSpecName: "config-data") pod "eeaba508-3582-4908-9e9c-3dbd53460fe0" (UID: "eeaba508-3582-4908-9e9c-3dbd53460fe0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.151345 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.190745 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eeaba508-3582-4908-9e9c-3dbd53460fe0" (UID: "eeaba508-3582-4908-9e9c-3dbd53460fe0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.213817 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-combined-ca-bundle\") pod \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.213861 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxms2\" (UniqueName: \"kubernetes.io/projected/a3610ce4-5db9-417f-9998-f1fa664fcfa8-kube-api-access-gxms2\") pod \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.213961 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-config-data\") pod \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.214054 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3610ce4-5db9-417f-9998-f1fa664fcfa8-logs\") pod \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\" (UID: \"a3610ce4-5db9-417f-9998-f1fa664fcfa8\") " Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.214530 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeaba508-3582-4908-9e9c-3dbd53460fe0-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.214551 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.214563 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeaba508-3582-4908-9e9c-3dbd53460fe0-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.214575 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdmvm\" (UniqueName: \"kubernetes.io/projected/eeaba508-3582-4908-9e9c-3dbd53460fe0-kube-api-access-cdmvm\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.214998 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3610ce4-5db9-417f-9998-f1fa664fcfa8-logs" (OuterVolumeSpecName: "logs") pod "a3610ce4-5db9-417f-9998-f1fa664fcfa8" (UID: "a3610ce4-5db9-417f-9998-f1fa664fcfa8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.219096 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3610ce4-5db9-417f-9998-f1fa664fcfa8-kube-api-access-gxms2" (OuterVolumeSpecName: "kube-api-access-gxms2") pod "a3610ce4-5db9-417f-9998-f1fa664fcfa8" (UID: "a3610ce4-5db9-417f-9998-f1fa664fcfa8"). InnerVolumeSpecName "kube-api-access-gxms2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.252133 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-config-data" (OuterVolumeSpecName: "config-data") pod "a3610ce4-5db9-417f-9998-f1fa664fcfa8" (UID: "a3610ce4-5db9-417f-9998-f1fa664fcfa8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.260552 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3610ce4-5db9-417f-9998-f1fa664fcfa8" (UID: "a3610ce4-5db9-417f-9998-f1fa664fcfa8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.264819 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"87a8e261-f388-43d6-b0c3-70694d68aa54","Type":"ContainerStarted","Data":"e5d8c3e08369511bba620c3a2341d789d5ba7c3fd144a2b1cbe000d636fa04eb"} Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.268055 4922 generic.go:334] "Generic (PLEG): container finished" podID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerID="fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e" exitCode=0 Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.268247 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eeaba508-3582-4908-9e9c-3dbd53460fe0","Type":"ContainerDied","Data":"fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e"} Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.268346 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"eeaba508-3582-4908-9e9c-3dbd53460fe0","Type":"ContainerDied","Data":"cd6a025129f42bcbd98b440942de1cf06f0418a2f193c29e6f9b31714909e6e2"} Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.268456 4922 scope.go:117] "RemoveContainer" containerID="fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.268636 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: W0930 00:03:25.299862 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab906156_68b3_4477_aff3_05cba9fe664f.slice/crio-ddb859d95be8a49470f23aa5f5329cf01bc2a56a643ae454b7f4b76a4a8c67af WatchSource:0}: Error finding container ddb859d95be8a49470f23aa5f5329cf01bc2a56a643ae454b7f4b76a4a8c67af: Status 404 returned error can't find the container with id ddb859d95be8a49470f23aa5f5329cf01bc2a56a643ae454b7f4b76a4a8c67af Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.299935 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.300125 4922 generic.go:334] "Generic (PLEG): container finished" podID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerID="8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed" exitCode=0 Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.300176 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3610ce4-5db9-417f-9998-f1fa664fcfa8","Type":"ContainerDied","Data":"8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed"} Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.300193 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"a3610ce4-5db9-417f-9998-f1fa664fcfa8","Type":"ContainerDied","Data":"5940e9e1e34bcb8ec116b88e3aa23583449b1e1a54abc55b29742dff5c8acbd3"} Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.300205 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.303671 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.3036551960000002 podStartE2EDuration="2.303655196s" podCreationTimestamp="2025-09-30 00:03:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:25.300660792 +0000 UTC m=+5809.610949605" watchObservedRunningTime="2025-09-30 00:03:25.303655196 +0000 UTC m=+5809.613944009" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.315649 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.315682 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxms2\" (UniqueName: \"kubernetes.io/projected/a3610ce4-5db9-417f-9998-f1fa664fcfa8-kube-api-access-gxms2\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.315695 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3610ce4-5db9-417f-9998-f1fa664fcfa8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.315707 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3610ce4-5db9-417f-9998-f1fa664fcfa8-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.361382 4922 scope.go:117] "RemoveContainer" 
containerID="2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.372462 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.401647 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.436834 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.451934 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.454538 4922 scope.go:117] "RemoveContainer" containerID="fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e" Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.456594 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e\": container with ID starting with fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e not found: ID does not exist" containerID="fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.456630 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e"} err="failed to get container status \"fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e\": rpc error: code = NotFound desc = could not find container \"fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e\": container with ID starting with fa92a09765ce76ce11977203fe031c27268e572febba0ae7702fdab38a10de3e not found: ID does not exist" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.456655 4922 scope.go:117] "RemoveContainer" containerID="2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa" Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.456927 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa\": container with ID starting with 2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa not found: ID does not exist" containerID="2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.456948 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa"} err="failed to get container status \"2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa\": rpc error: code = NotFound desc = could not find container \"2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa\": container with ID starting with 2ca392dfc66d14e912e2578c9854e96a8b54b9c10446e4716476a437c86d99aa not found: ID does not exist" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.456962 4922 scope.go:117] "RemoveContainer" containerID="8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.506229 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.507130 4922 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-log" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507144 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-log" Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.507213 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-metadata" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507219 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-metadata" Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.507255 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-log" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507262 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-log" Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.507283 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-api" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507289 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-api" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507660 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-metadata" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507683 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-api" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507701 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" containerName="nova-metadata-log" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.507719 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" containerName="nova-api-log" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.509218 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.511192 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.519428 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.528822 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.531942 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.553467 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.566071 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.573932 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.636815 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.636894 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npw4n\" (UniqueName: \"kubernetes.io/projected/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-kube-api-access-npw4n\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.636925 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fdc8509-271e-4995-af80-db635ba06700-logs\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.636943 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-logs\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.636963 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b64q5\" (UniqueName: \"kubernetes.io/projected/7fdc8509-271e-4995-af80-db635ba06700-kube-api-access-b64q5\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.637295 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-config-data\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.637322 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.637338 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-config-data\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.640331 4922 scope.go:117] "RemoveContainer" containerID="5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739069 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fdc8509-271e-4995-af80-db635ba06700-logs\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739116 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-logs\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739159 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b64q5\" (UniqueName: \"kubernetes.io/projected/7fdc8509-271e-4995-af80-db635ba06700-kube-api-access-b64q5\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739225 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-config-data\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739257 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739275 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-config-data\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739353 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739388 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npw4n\" (UniqueName: \"kubernetes.io/projected/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-kube-api-access-npw4n\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.739580 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fdc8509-271e-4995-af80-db635ba06700-logs\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc 
kubenswrapper[4922]: I0930 00:03:25.740233 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-logs\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.744769 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.744817 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-config-data\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.744824 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-config-data\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.751626 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.760958 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b64q5\" (UniqueName: \"kubernetes.io/projected/7fdc8509-271e-4995-af80-db635ba06700-kube-api-access-b64q5\") pod \"nova-metadata-0\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.761087 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npw4n\" (UniqueName: \"kubernetes.io/projected/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-kube-api-access-npw4n\") pod \"nova-api-0\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " pod="openstack/nova-api-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.804020 4922 scope.go:117] "RemoveContainer" containerID="8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed" Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.804843 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed\": container with ID starting with 8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed not found: ID does not exist" containerID="8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.804886 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed"} err="failed to get container status \"8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed\": rpc error: code = NotFound desc = could not find container \"8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed\": container with ID 
starting with 8aff2b18e9fa5b479db58eb38fbe4787e971faea8254f756a283da2886522aed not found: ID does not exist" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.804912 4922 scope.go:117] "RemoveContainer" containerID="5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5" Sep 30 00:03:25 crc kubenswrapper[4922]: E0930 00:03:25.805486 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5\": container with ID starting with 5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5 not found: ID does not exist" containerID="5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.805508 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5"} err="failed to get container status \"5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5\": rpc error: code = NotFound desc = could not find container \"5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5\": container with ID starting with 5a6a6c4cb3647946bd1486fa4fa1f172054bfdaca4fc31151194857bbebffba5 not found: ID does not exist" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.825311 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 00:03:25 crc kubenswrapper[4922]: I0930 00:03:25.860598 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.278225 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.333588 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fdc8509-271e-4995-af80-db635ba06700","Type":"ContainerStarted","Data":"60979eb8f2eb13bc22c6c455f6d3b74029107191896063179c88c23f9e8682f0"} Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.341062 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe2075ef-27e4-4e92-84d3-4178fa974985","Type":"ContainerStarted","Data":"78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a"} Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.341131 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe2075ef-27e4-4e92-84d3-4178fa974985","Type":"ContainerStarted","Data":"f3e17dbf19c860b14a4c9b01af5caa767383f3474fe3516b5e00ebd1f7c4b1e5"} Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.347607 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ab906156-68b3-4477-aff3-05cba9fe664f","Type":"ContainerStarted","Data":"86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d"} Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.347675 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ab906156-68b3-4477-aff3-05cba9fe664f","Type":"ContainerStarted","Data":"ddb859d95be8a49470f23aa5f5329cf01bc2a56a643ae454b7f4b76a4a8c67af"} Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.347813 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:26 crc 
kubenswrapper[4922]: I0930 00:03:26.381315 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.381291125 podStartE2EDuration="2.381291125s" podCreationTimestamp="2025-09-30 00:03:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:26.361913877 +0000 UTC m=+5810.672202690" watchObservedRunningTime="2025-09-30 00:03:26.381291125 +0000 UTC m=+5810.691579938" Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.390308 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.390272117 podStartE2EDuration="2.390272117s" podCreationTimestamp="2025-09-30 00:03:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:26.380836314 +0000 UTC m=+5810.691125127" watchObservedRunningTime="2025-09-30 00:03:26.390272117 +0000 UTC m=+5810.700560930" Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.443285 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3610ce4-5db9-417f-9998-f1fa664fcfa8" path="/var/lib/kubelet/pods/a3610ce4-5db9-417f-9998-f1fa664fcfa8/volumes" Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.444271 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeaba508-3582-4908-9e9c-3dbd53460fe0" path="/var/lib/kubelet/pods/eeaba508-3582-4908-9e9c-3dbd53460fe0/volumes" Sep 30 00:03:26 crc kubenswrapper[4922]: I0930 00:03:26.445061 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.362565 4922 generic.go:334] "Generic (PLEG): container finished" podID="284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" containerID="83da8fb26fc5825bbd39d469e693b4ee3c3a1b5fbd7a3d1e7f4890a3246d0179" exitCode=0 Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.363049 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6","Type":"ContainerDied","Data":"83da8fb26fc5825bbd39d469e693b4ee3c3a1b5fbd7a3d1e7f4890a3246d0179"} Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.373294 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b","Type":"ContainerStarted","Data":"99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a"} Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.373337 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b","Type":"ContainerStarted","Data":"2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60"} Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.373348 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b","Type":"ContainerStarted","Data":"22d1cd90b71673ba7282ecaab8aa1102e71686b45095eeee9435c74653fd5fb5"} Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.380598 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fdc8509-271e-4995-af80-db635ba06700","Type":"ContainerStarted","Data":"99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046"} Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 
00:03:27.380670 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fdc8509-271e-4995-af80-db635ba06700","Type":"ContainerStarted","Data":"eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735"} Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.399461 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.399447307 podStartE2EDuration="2.399447307s" podCreationTimestamp="2025-09-30 00:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:27.391806798 +0000 UTC m=+5811.702095611" watchObservedRunningTime="2025-09-30 00:03:27.399447307 +0000 UTC m=+5811.709736120" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.426992 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.426974135 podStartE2EDuration="2.426974135s" podCreationTimestamp="2025-09-30 00:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:27.417879 +0000 UTC m=+5811.728167833" watchObservedRunningTime="2025-09-30 00:03:27.426974135 +0000 UTC m=+5811.737262948" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.522694 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.679137 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zc4m\" (UniqueName: \"kubernetes.io/projected/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-kube-api-access-5zc4m\") pod \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.679246 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-config-data\") pod \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.679290 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-combined-ca-bundle\") pod \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\" (UID: \"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6\") " Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.690649 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-kube-api-access-5zc4m" (OuterVolumeSpecName: "kube-api-access-5zc4m") pod "284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" (UID: "284f3b0a-8c60-43d4-9ec8-40b2eaab29c6"). InnerVolumeSpecName "kube-api-access-5zc4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.704686 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-config-data" (OuterVolumeSpecName: "config-data") pod "284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" (UID: "284f3b0a-8c60-43d4-9ec8-40b2eaab29c6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.707112 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" (UID: "284f3b0a-8c60-43d4-9ec8-40b2eaab29c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.781303 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zc4m\" (UniqueName: \"kubernetes.io/projected/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-kube-api-access-5zc4m\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.781329 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:27 crc kubenswrapper[4922]: I0930 00:03:27.781340 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.390732 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.390992 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"284f3b0a-8c60-43d4-9ec8-40b2eaab29c6","Type":"ContainerDied","Data":"f6ad361a030683beabdb0efdb5d6a9a5c5dc83d08c37d108758e5f0ae87b63ae"} Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.391056 4922 scope.go:117] "RemoveContainer" containerID="83da8fb26fc5825bbd39d469e693b4ee3c3a1b5fbd7a3d1e7f4890a3246d0179" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.476232 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.489374 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.505039 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:03:28 crc kubenswrapper[4922]: E0930 00:03:28.505771 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" containerName="nova-cell1-conductor-conductor" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.505901 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" containerName="nova-cell1-conductor-conductor" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.506351 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" containerName="nova-cell1-conductor-conductor" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.508575 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.510381 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.516175 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.600097 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.600341 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc98g\" (UniqueName: \"kubernetes.io/projected/3814c531-0b90-4d50-bc72-f9c990eeee7e-kube-api-access-vc98g\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.600517 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.647374 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.702758 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc98g\" (UniqueName: \"kubernetes.io/projected/3814c531-0b90-4d50-bc72-f9c990eeee7e-kube-api-access-vc98g\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.703075 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.703162 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.718259 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.718459 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-config-data\") pod \"nova-cell1-conductor-0\" 
(UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.725750 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc98g\" (UniqueName: \"kubernetes.io/projected/3814c531-0b90-4d50-bc72-f9c990eeee7e-kube-api-access-vc98g\") pod \"nova-cell1-conductor-0\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:28 crc kubenswrapper[4922]: I0930 00:03:28.826870 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:29 crc kubenswrapper[4922]: I0930 00:03:29.257640 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 00:03:29 crc kubenswrapper[4922]: W0930 00:03:29.263655 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3814c531_0b90_4d50_bc72_f9c990eeee7e.slice/crio-13670b28740468c37c4a7d926fd783072b23941afaeb3a439d77dc82a021e46e WatchSource:0}: Error finding container 13670b28740468c37c4a7d926fd783072b23941afaeb3a439d77dc82a021e46e: Status 404 returned error can't find the container with id 13670b28740468c37c4a7d926fd783072b23941afaeb3a439d77dc82a021e46e Sep 30 00:03:29 crc kubenswrapper[4922]: I0930 00:03:29.400139 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3814c531-0b90-4d50-bc72-f9c990eeee7e","Type":"ContainerStarted","Data":"13670b28740468c37c4a7d926fd783072b23941afaeb3a439d77dc82a021e46e"} Sep 30 00:03:29 crc kubenswrapper[4922]: I0930 00:03:29.699711 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 00:03:30 crc kubenswrapper[4922]: I0930 00:03:30.414799 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3814c531-0b90-4d50-bc72-f9c990eeee7e","Type":"ContainerStarted","Data":"9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72"} Sep 30 00:03:30 crc kubenswrapper[4922]: I0930 00:03:30.415364 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:30 crc kubenswrapper[4922]: I0930 00:03:30.434939 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="284f3b0a-8c60-43d4-9ec8-40b2eaab29c6" path="/var/lib/kubelet/pods/284f3b0a-8c60-43d4-9ec8-40b2eaab29c6/volumes" Sep 30 00:03:30 crc kubenswrapper[4922]: I0930 00:03:30.440185 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.440168711 podStartE2EDuration="2.440168711s" podCreationTimestamp="2025-09-30 00:03:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:30.435164317 +0000 UTC m=+5814.745453130" watchObservedRunningTime="2025-09-30 00:03:30.440168711 +0000 UTC m=+5814.750457524" Sep 30 00:03:30 crc kubenswrapper[4922]: I0930 00:03:30.826544 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:03:30 crc kubenswrapper[4922]: I0930 00:03:30.828066 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 00:03:33 crc kubenswrapper[4922]: I0930 00:03:33.647954 4922 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:33 crc kubenswrapper[4922]: I0930 00:03:33.658328 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:34 crc kubenswrapper[4922]: I0930 00:03:34.458099 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 00:03:34 crc kubenswrapper[4922]: I0930 00:03:34.700231 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 00:03:34 crc kubenswrapper[4922]: I0930 00:03:34.713520 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 00:03:34 crc kubenswrapper[4922]: I0930 00:03:34.725419 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 00:03:35 crc kubenswrapper[4922]: I0930 00:03:35.486290 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 00:03:35 crc kubenswrapper[4922]: I0930 00:03:35.827330 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:03:35 crc kubenswrapper[4922]: I0930 00:03:35.827371 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 00:03:35 crc kubenswrapper[4922]: I0930 00:03:35.862511 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:03:35 crc kubenswrapper[4922]: I0930 00:03:35.862560 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 00:03:36 crc kubenswrapper[4922]: I0930 00:03:36.908672 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.84:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:03:36 crc kubenswrapper[4922]: I0930 00:03:36.990672 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.84:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:03:36 crc kubenswrapper[4922]: I0930 00:03:36.990757 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.85:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:03:36 crc kubenswrapper[4922]: I0930 00:03:36.990757 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.85:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 00:03:38 crc kubenswrapper[4922]: I0930 00:03:38.854632 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.518686 4922 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.521201 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.523988 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.534963 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.683547 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-scripts\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.684207 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw796\" (UniqueName: \"kubernetes.io/projected/6d7654b6-1b84-4739-a53a-514e92fb292e-kube-api-access-cw796\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.684230 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d7654b6-1b84-4739-a53a-514e92fb292e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.684264 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.684311 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.684437 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.792199 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.792421 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.792580 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-scripts\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.792654 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw796\" (UniqueName: \"kubernetes.io/projected/6d7654b6-1b84-4739-a53a-514e92fb292e-kube-api-access-cw796\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.792679 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d7654b6-1b84-4739-a53a-514e92fb292e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.792771 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.793001 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d7654b6-1b84-4739-a53a-514e92fb292e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.799776 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.799851 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-scripts\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.799909 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.800237 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.826955 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw796\" (UniqueName: 
\"kubernetes.io/projected/6d7654b6-1b84-4739-a53a-514e92fb292e-kube-api-access-cw796\") pod \"cinder-scheduler-0\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:41 crc kubenswrapper[4922]: I0930 00:03:41.897910 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:03:42 crc kubenswrapper[4922]: I0930 00:03:42.248455 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:42 crc kubenswrapper[4922]: I0930 00:03:42.527551 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6d7654b6-1b84-4739-a53a-514e92fb292e","Type":"ContainerStarted","Data":"7a1537893b0c89ded9c620ea15bd8fd760cc56552f6ddb353e86dd88caee63be"} Sep 30 00:03:42 crc kubenswrapper[4922]: I0930 00:03:42.800004 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:42 crc kubenswrapper[4922]: I0930 00:03:42.800414 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api-log" containerID="cri-o://f8ac858019877d34a1536d81c1e6549821bb23a4ef10e1d8a247ee2e5d25f3e2" gracePeriod=30 Sep 30 00:03:42 crc kubenswrapper[4922]: I0930 00:03:42.800531 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api" containerID="cri-o://e18bad55480bd38b4b32b8d27428b1eb1405b32af92c736675bd7d9d4405d4ff" gracePeriod=30 Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.385247 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.389190 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.391493 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.422533 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431119 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431163 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431188 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431211 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431234 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-sys\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431404 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-dev\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431461 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431540 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc 
kubenswrapper[4922]: I0930 00:03:43.431654 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-run\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431689 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431728 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431773 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431795 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431826 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/df10883e-41b9-46f3-a960-005cba2e6c29-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431865 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p78dk\" (UniqueName: \"kubernetes.io/projected/df10883e-41b9-46f3-a960-005cba2e6c29-kube-api-access-p78dk\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.431978 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.533807 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534163 
4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534174 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534193 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534236 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-sys\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534260 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534310 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-dev\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534336 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534382 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534477 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-run\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534486 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-sys\") pod \"cinder-volume-volume1-0\" (UID: 
\"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534531 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534489 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-dev\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534503 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534579 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-run\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534594 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534633 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534658 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534695 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/df10883e-41b9-46f3-a960-005cba2e6c29-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534731 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p78dk\" (UniqueName: \"kubernetes.io/projected/df10883e-41b9-46f3-a960-005cba2e6c29-kube-api-access-p78dk\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534690 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534725 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534753 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534825 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.534841 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.535015 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/df10883e-41b9-46f3-a960-005cba2e6c29-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.542777 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.542877 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.543288 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.546993 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df10883e-41b9-46f3-a960-005cba2e6c29-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 
00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.547301 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/df10883e-41b9-46f3-a960-005cba2e6c29-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.555200 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p78dk\" (UniqueName: \"kubernetes.io/projected/df10883e-41b9-46f3-a960-005cba2e6c29-kube-api-access-p78dk\") pod \"cinder-volume-volume1-0\" (UID: \"df10883e-41b9-46f3-a960-005cba2e6c29\") " pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.556501 4922 generic.go:334] "Generic (PLEG): container finished" podID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerID="f8ac858019877d34a1536d81c1e6549821bb23a4ef10e1d8a247ee2e5d25f3e2" exitCode=143 Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.556571 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"88daf340-9131-4476-9b5e-7a070d7c7a82","Type":"ContainerDied","Data":"f8ac858019877d34a1536d81c1e6549821bb23a4ef10e1d8a247ee2e5d25f3e2"} Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.558460 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6d7654b6-1b84-4739-a53a-514e92fb292e","Type":"ContainerStarted","Data":"9130c30e28d88081180b11b17bedf75a49308bb8fbcd83355c5c7847064a71ca"} Sep 30 00:03:43 crc kubenswrapper[4922]: I0930 00:03:43.727987 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.015753 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.020614 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.023872 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.031601 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.042888 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.042926 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.042943 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-ceph\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.042978 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043010 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043035 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-sys\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043052 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043069 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-run\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043084 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-nvme\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043102 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-lib-modules\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043122 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-config-data\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043161 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-dev\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043174 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-scripts\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043195 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-config-data-custom\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043225 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.043241 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6wnq\" (UniqueName: \"kubernetes.io/projected/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-kube-api-access-m6wnq\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.143882 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.143946 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" 
Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.143971 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-ceph\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.143982 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144015 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144065 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144078 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144099 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-sys\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144117 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144124 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144139 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-run\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144155 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc 
kubenswrapper[4922]: I0930 00:03:44.144160 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-nvme\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144178 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-run\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144190 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-lib-modules\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144236 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-nvme\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144263 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144309 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-lib-modules\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144318 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-config-data\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144466 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-dev\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144482 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-scripts\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144506 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-config-data-custom\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144524 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144539 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6wnq\" (UniqueName: \"kubernetes.io/projected/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-kube-api-access-m6wnq\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.144907 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-dev\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.145086 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-sys\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.150248 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-scripts\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.150251 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-ceph\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.150353 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-config-data-custom\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.150366 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.150924 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-config-data\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.163863 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6wnq\" (UniqueName: \"kubernetes.io/projected/32ccf717-6bb2-43ce-9fdb-df41eb98bc8f-kube-api-access-m6wnq\") pod \"cinder-backup-0\" (UID: \"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f\") " pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.344550 4922 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/cinder-backup-0" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.352574 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Sep 30 00:03:44 crc kubenswrapper[4922]: W0930 00:03:44.366256 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf10883e_41b9_46f3_a960_005cba2e6c29.slice/crio-4b0c949b7bafe644c77dd9be8c1f5e433dcddc134f3b77f150a7b2e703e6a061 WatchSource:0}: Error finding container 4b0c949b7bafe644c77dd9be8c1f5e433dcddc134f3b77f150a7b2e703e6a061: Status 404 returned error can't find the container with id 4b0c949b7bafe644c77dd9be8c1f5e433dcddc134f3b77f150a7b2e703e6a061 Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.378360 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.574266 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6d7654b6-1b84-4739-a53a-514e92fb292e","Type":"ContainerStarted","Data":"2a9835bf87f41ffcf1b7deed3c98486eb1e5e5bd9aa5592023f28b2b878f00b5"} Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.576861 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"df10883e-41b9-46f3-a960-005cba2e6c29","Type":"ContainerStarted","Data":"4b0c949b7bafe644c77dd9be8c1f5e433dcddc134f3b77f150a7b2e703e6a061"} Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.603489 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.6034694739999997 podStartE2EDuration="3.603469474s" podCreationTimestamp="2025-09-30 00:03:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:44.593353675 +0000 UTC m=+5828.903642498" watchObservedRunningTime="2025-09-30 00:03:44.603469474 +0000 UTC m=+5828.913758287" Sep 30 00:03:44 crc kubenswrapper[4922]: I0930 00:03:44.869593 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.587512 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f","Type":"ContainerStarted","Data":"cf71e925cfbe9d44ad405ccb52e9f73f9a5d55ecf25de5601f74d0770f485c84"} Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.828861 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.828981 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.831042 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.831846 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.882019 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.883261 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-api-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.885706 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.888492 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:03:45 crc kubenswrapper[4922]: I0930 00:03:45.965304 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.80:8776/healthcheck\": read tcp 10.217.0.2:33332->10.217.1.80:8776: read: connection reset by peer" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.602520 4922 generic.go:334] "Generic (PLEG): container finished" podID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerID="e18bad55480bd38b4b32b8d27428b1eb1405b32af92c736675bd7d9d4405d4ff" exitCode=0 Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.602604 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"88daf340-9131-4476-9b5e-7a070d7c7a82","Type":"ContainerDied","Data":"e18bad55480bd38b4b32b8d27428b1eb1405b32af92c736675bd7d9d4405d4ff"} Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.603208 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"88daf340-9131-4476-9b5e-7a070d7c7a82","Type":"ContainerDied","Data":"9bdab6ca708940e1a0b51d9d3f69fb7caa9698d81fae9b291e35dd8f4bc5bfa0"} Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.603230 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bdab6ca708940e1a0b51d9d3f69fb7caa9698d81fae9b291e35dd8f4bc5bfa0" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.604296 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.607369 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.637765 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.797903 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-scripts\") pod \"88daf340-9131-4476-9b5e-7a070d7c7a82\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.797951 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/88daf340-9131-4476-9b5e-7a070d7c7a82-etc-machine-id\") pod \"88daf340-9131-4476-9b5e-7a070d7c7a82\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.798125 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88daf340-9131-4476-9b5e-7a070d7c7a82-logs\") pod \"88daf340-9131-4476-9b5e-7a070d7c7a82\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.798164 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-combined-ca-bundle\") pod \"88daf340-9131-4476-9b5e-7a070d7c7a82\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.798211 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data-custom\") pod \"88daf340-9131-4476-9b5e-7a070d7c7a82\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.798230 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data\") pod \"88daf340-9131-4476-9b5e-7a070d7c7a82\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.798257 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67kks\" (UniqueName: \"kubernetes.io/projected/88daf340-9131-4476-9b5e-7a070d7c7a82-kube-api-access-67kks\") pod \"88daf340-9131-4476-9b5e-7a070d7c7a82\" (UID: \"88daf340-9131-4476-9b5e-7a070d7c7a82\") " Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.814283 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/88daf340-9131-4476-9b5e-7a070d7c7a82-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "88daf340-9131-4476-9b5e-7a070d7c7a82" (UID: "88daf340-9131-4476-9b5e-7a070d7c7a82"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.814732 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88daf340-9131-4476-9b5e-7a070d7c7a82-logs" (OuterVolumeSpecName: "logs") pod "88daf340-9131-4476-9b5e-7a070d7c7a82" (UID: "88daf340-9131-4476-9b5e-7a070d7c7a82"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.815191 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "88daf340-9131-4476-9b5e-7a070d7c7a82" (UID: "88daf340-9131-4476-9b5e-7a070d7c7a82"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.815288 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88daf340-9131-4476-9b5e-7a070d7c7a82-kube-api-access-67kks" (OuterVolumeSpecName: "kube-api-access-67kks") pod "88daf340-9131-4476-9b5e-7a070d7c7a82" (UID: "88daf340-9131-4476-9b5e-7a070d7c7a82"). InnerVolumeSpecName "kube-api-access-67kks". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.821595 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-scripts" (OuterVolumeSpecName: "scripts") pod "88daf340-9131-4476-9b5e-7a070d7c7a82" (UID: "88daf340-9131-4476-9b5e-7a070d7c7a82"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.846780 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88daf340-9131-4476-9b5e-7a070d7c7a82" (UID: "88daf340-9131-4476-9b5e-7a070d7c7a82"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.859882 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data" (OuterVolumeSpecName: "config-data") pod "88daf340-9131-4476-9b5e-7a070d7c7a82" (UID: "88daf340-9131-4476-9b5e-7a070d7c7a82"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.898674 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.901403 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/88daf340-9131-4476-9b5e-7a070d7c7a82-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.901436 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.901450 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.901458 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.901469 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67kks\" (UniqueName: \"kubernetes.io/projected/88daf340-9131-4476-9b5e-7a070d7c7a82-kube-api-access-67kks\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.901479 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88daf340-9131-4476-9b5e-7a070d7c7a82-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:46 crc kubenswrapper[4922]: I0930 00:03:46.901489 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/88daf340-9131-4476-9b5e-7a070d7c7a82-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.616763 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"df10883e-41b9-46f3-a960-005cba2e6c29","Type":"ContainerStarted","Data":"7676c50b8868b6a401ec4182465d63c7ca3ee4d464a2569f7f7294ef0bedbc1f"} Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.617162 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"df10883e-41b9-46f3-a960-005cba2e6c29","Type":"ContainerStarted","Data":"ff46f387f94e9fe5bc84aa4300251035c6d02f55da74e9a4a78f05802421844c"} Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.620059 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f","Type":"ContainerStarted","Data":"91b573df13b8687486769052c7b318dbe0f71be81967fd692c83b76d14fcb03f"} Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.620098 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"32ccf717-6bb2-43ce-9fdb-df41eb98bc8f","Type":"ContainerStarted","Data":"570bda71c976ea9085d076bd1abc331631a6f1beb1a9240f923db05f157f24e2"} Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.620322 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.648953 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=2.768863148 podStartE2EDuration="4.648935495s" podCreationTimestamp="2025-09-30 00:03:43 +0000 UTC" firstStartedPulling="2025-09-30 00:03:44.378084963 +0000 UTC m=+5828.688373776" lastFinishedPulling="2025-09-30 00:03:46.25815731 +0000 UTC m=+5830.568446123" observedRunningTime="2025-09-30 00:03:47.644137297 +0000 UTC m=+5831.954426110" watchObservedRunningTime="2025-09-30 00:03:47.648935495 +0000 UTC m=+5831.959224308" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.678606 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.178155117 podStartE2EDuration="4.678583917s" podCreationTimestamp="2025-09-30 00:03:43 +0000 UTC" firstStartedPulling="2025-09-30 00:03:44.882879768 +0000 UTC m=+5829.193168581" lastFinishedPulling="2025-09-30 00:03:46.383308568 +0000 UTC m=+5830.693597381" observedRunningTime="2025-09-30 00:03:47.675814349 +0000 UTC m=+5831.986103162" watchObservedRunningTime="2025-09-30 00:03:47.678583917 +0000 UTC m=+5831.988872770" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.723895 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.735876 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.755439 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:47 crc kubenswrapper[4922]: E0930 00:03:47.756106 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api-log" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.756141 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api-log" Sep 30 00:03:47 crc kubenswrapper[4922]: E0930 00:03:47.756221 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.756236 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.756565 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.756643 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" containerName="cinder-api-log" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.758549 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.761942 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.768288 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.921430 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/290f521e-93e4-412d-9724-40f850a4702a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.921489 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/290f521e-93e4-412d-9724-40f850a4702a-logs\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.921655 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-config-data-custom\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.921733 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-config-data\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.921771 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.921883 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-scripts\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:47 crc kubenswrapper[4922]: I0930 00:03:47.921939 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nm44\" (UniqueName: \"kubernetes.io/projected/290f521e-93e4-412d-9724-40f850a4702a-kube-api-access-7nm44\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024113 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/290f521e-93e4-412d-9724-40f850a4702a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/290f521e-93e4-412d-9724-40f850a4702a-logs\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024203 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-config-data-custom\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024230 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-config-data\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024246 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-scripts\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024292 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nm44\" (UniqueName: \"kubernetes.io/projected/290f521e-93e4-412d-9724-40f850a4702a-kube-api-access-7nm44\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024694 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/290f521e-93e4-412d-9724-40f850a4702a-logs\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.024813 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/290f521e-93e4-412d-9724-40f850a4702a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.030890 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-scripts\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.031267 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.031545 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-config-data\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.036938 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/290f521e-93e4-412d-9724-40f850a4702a-config-data-custom\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.040130 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nm44\" (UniqueName: \"kubernetes.io/projected/290f521e-93e4-412d-9724-40f850a4702a-kube-api-access-7nm44\") pod \"cinder-api-0\" (UID: \"290f521e-93e4-412d-9724-40f850a4702a\") " pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.074381 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.451116 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88daf340-9131-4476-9b5e-7a070d7c7a82" path="/var/lib/kubelet/pods/88daf340-9131-4476-9b5e-7a070d7c7a82/volumes" Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.541425 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 00:03:48 crc kubenswrapper[4922]: W0930 00:03:48.546211 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod290f521e_93e4_412d_9724_40f850a4702a.slice/crio-1d11bdf28c7baa91699ddc7e0f95e3aac898f5fe07e77db26fea6c8c7344903f WatchSource:0}: Error finding container 1d11bdf28c7baa91699ddc7e0f95e3aac898f5fe07e77db26fea6c8c7344903f: Status 404 returned error can't find the container with id 1d11bdf28c7baa91699ddc7e0f95e3aac898f5fe07e77db26fea6c8c7344903f Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.661591 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"290f521e-93e4-412d-9724-40f850a4702a","Type":"ContainerStarted","Data":"1d11bdf28c7baa91699ddc7e0f95e3aac898f5fe07e77db26fea6c8c7344903f"} Sep 30 00:03:48 crc kubenswrapper[4922]: I0930 00:03:48.728905 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:49 crc kubenswrapper[4922]: I0930 00:03:49.345426 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Sep 30 00:03:49 crc kubenswrapper[4922]: I0930 00:03:49.675334 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"290f521e-93e4-412d-9724-40f850a4702a","Type":"ContainerStarted","Data":"8b4bb3af4d16e3435e6ccb5d01f6b87993fc8f5f6a571bb7a1ecb50481034ca9"} Sep 30 00:03:49 crc kubenswrapper[4922]: I0930 00:03:49.675651 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"290f521e-93e4-412d-9724-40f850a4702a","Type":"ContainerStarted","Data":"cfad667225151c76b51a2180f19d718ab691dace7101966c83d94887eed6eead"} Sep 30 00:03:49 crc kubenswrapper[4922]: I0930 00:03:49.697635 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.6976170120000003 podStartE2EDuration="2.697617012s" podCreationTimestamp="2025-09-30 00:03:47 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:49.689227615 +0000 UTC m=+5833.999516428" watchObservedRunningTime="2025-09-30 00:03:49.697617012 +0000 UTC m=+5834.007905825" Sep 30 00:03:50 crc kubenswrapper[4922]: I0930 00:03:50.684623 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 00:03:52 crc kubenswrapper[4922]: I0930 00:03:52.139060 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 00:03:52 crc kubenswrapper[4922]: I0930 00:03:52.242329 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:52 crc kubenswrapper[4922]: I0930 00:03:52.704017 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="cinder-scheduler" containerID="cri-o://9130c30e28d88081180b11b17bedf75a49308bb8fbcd83355c5c7847064a71ca" gracePeriod=30 Sep 30 00:03:52 crc kubenswrapper[4922]: I0930 00:03:52.704088 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="probe" containerID="cri-o://2a9835bf87f41ffcf1b7deed3c98486eb1e5e5bd9aa5592023f28b2b878f00b5" gracePeriod=30 Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.727041 4922 generic.go:334] "Generic (PLEG): container finished" podID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerID="2a9835bf87f41ffcf1b7deed3c98486eb1e5e5bd9aa5592023f28b2b878f00b5" exitCode=0 Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.727536 4922 generic.go:334] "Generic (PLEG): container finished" podID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerID="9130c30e28d88081180b11b17bedf75a49308bb8fbcd83355c5c7847064a71ca" exitCode=0 Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.727108 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6d7654b6-1b84-4739-a53a-514e92fb292e","Type":"ContainerDied","Data":"2a9835bf87f41ffcf1b7deed3c98486eb1e5e5bd9aa5592023f28b2b878f00b5"} Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.727587 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6d7654b6-1b84-4739-a53a-514e92fb292e","Type":"ContainerDied","Data":"9130c30e28d88081180b11b17bedf75a49308bb8fbcd83355c5c7847064a71ca"} Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.879493 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.947833 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959120 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6d7654b6-1b84-4739-a53a-514e92fb292e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6d7654b6-1b84-4739-a53a-514e92fb292e" (UID: "6d7654b6-1b84-4739-a53a-514e92fb292e"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959172 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d7654b6-1b84-4739-a53a-514e92fb292e-etc-machine-id\") pod \"6d7654b6-1b84-4739-a53a-514e92fb292e\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959250 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-scripts\") pod \"6d7654b6-1b84-4739-a53a-514e92fb292e\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959290 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data-custom\") pod \"6d7654b6-1b84-4739-a53a-514e92fb292e\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959314 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw796\" (UniqueName: \"kubernetes.io/projected/6d7654b6-1b84-4739-a53a-514e92fb292e-kube-api-access-cw796\") pod \"6d7654b6-1b84-4739-a53a-514e92fb292e\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959370 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-combined-ca-bundle\") pod \"6d7654b6-1b84-4739-a53a-514e92fb292e\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959464 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data\") pod \"6d7654b6-1b84-4739-a53a-514e92fb292e\" (UID: \"6d7654b6-1b84-4739-a53a-514e92fb292e\") " Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.959861 4922 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6d7654b6-1b84-4739-a53a-514e92fb292e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.970912 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d7654b6-1b84-4739-a53a-514e92fb292e-kube-api-access-cw796" (OuterVolumeSpecName: "kube-api-access-cw796") pod "6d7654b6-1b84-4739-a53a-514e92fb292e" (UID: "6d7654b6-1b84-4739-a53a-514e92fb292e"). InnerVolumeSpecName "kube-api-access-cw796". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.974788 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6d7654b6-1b84-4739-a53a-514e92fb292e" (UID: "6d7654b6-1b84-4739-a53a-514e92fb292e"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:53 crc kubenswrapper[4922]: I0930 00:03:53.981957 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-scripts" (OuterVolumeSpecName: "scripts") pod "6d7654b6-1b84-4739-a53a-514e92fb292e" (UID: "6d7654b6-1b84-4739-a53a-514e92fb292e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.053718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d7654b6-1b84-4739-a53a-514e92fb292e" (UID: "6d7654b6-1b84-4739-a53a-514e92fb292e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.061444 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.061483 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.061495 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw796\" (UniqueName: \"kubernetes.io/projected/6d7654b6-1b84-4739-a53a-514e92fb292e-kube-api-access-cw796\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.061506 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.062595 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data" (OuterVolumeSpecName: "config-data") pod "6d7654b6-1b84-4739-a53a-514e92fb292e" (UID: "6d7654b6-1b84-4739-a53a-514e92fb292e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.163004 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6d7654b6-1b84-4739-a53a-514e92fb292e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.592570 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.740615 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6d7654b6-1b84-4739-a53a-514e92fb292e","Type":"ContainerDied","Data":"7a1537893b0c89ded9c620ea15bd8fd760cc56552f6ddb353e86dd88caee63be"} Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.740926 4922 scope.go:117] "RemoveContainer" containerID="2a9835bf87f41ffcf1b7deed3c98486eb1e5e5bd9aa5592023f28b2b878f00b5" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.740685 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.781488 4922 scope.go:117] "RemoveContainer" containerID="9130c30e28d88081180b11b17bedf75a49308bb8fbcd83355c5c7847064a71ca" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.784918 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.795644 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.815416 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:54 crc kubenswrapper[4922]: E0930 00:03:54.815892 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="cinder-scheduler" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.815912 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="cinder-scheduler" Sep 30 00:03:54 crc kubenswrapper[4922]: E0930 00:03:54.815959 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="probe" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.815967 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="probe" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.816189 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="cinder-scheduler" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.816210 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" containerName="probe" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.817221 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.822756 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 00:03:54 crc kubenswrapper[4922]: I0930 00:03:54.824369 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.000330 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-scripts\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.000425 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs7r8\" (UniqueName: \"kubernetes.io/projected/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-kube-api-access-xs7r8\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.000474 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.000577 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.000620 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-config-data\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.000649 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.102648 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-scripts\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.102696 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs7r8\" (UniqueName: \"kubernetes.io/projected/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-kube-api-access-xs7r8\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.102730 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.102817 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.102847 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-config-data\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.102868 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.103669 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.108160 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.110090 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-config-data\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.122684 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-scripts\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.128849 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs7r8\" (UniqueName: \"kubernetes.io/projected/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-kube-api-access-xs7r8\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.130848 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b33a9fd-6cb6-408b-83c8-79605b21f4c0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"6b33a9fd-6cb6-408b-83c8-79605b21f4c0\") " pod="openstack/cinder-scheduler-0" Sep 30 
00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.139198 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.659828 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 00:03:55 crc kubenswrapper[4922]: I0930 00:03:55.754534 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6b33a9fd-6cb6-408b-83c8-79605b21f4c0","Type":"ContainerStarted","Data":"bef1ea9e1b6a7cf09db8e2b7cead361c74dc6df1cb112685f8a9b01f43fa1694"} Sep 30 00:03:56 crc kubenswrapper[4922]: I0930 00:03:56.448690 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d7654b6-1b84-4739-a53a-514e92fb292e" path="/var/lib/kubelet/pods/6d7654b6-1b84-4739-a53a-514e92fb292e/volumes" Sep 30 00:03:56 crc kubenswrapper[4922]: I0930 00:03:56.780012 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6b33a9fd-6cb6-408b-83c8-79605b21f4c0","Type":"ContainerStarted","Data":"300be6a0066adbbfab33d5592ea44bbfc2c9d06e775efcd5cc868adf5acb6c38"} Sep 30 00:03:57 crc kubenswrapper[4922]: I0930 00:03:57.795088 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"6b33a9fd-6cb6-408b-83c8-79605b21f4c0","Type":"ContainerStarted","Data":"a4b4eea69850c9eebd5ad4a44513e2fd5e2173721826cde0e5d009ed98e976a5"} Sep 30 00:03:57 crc kubenswrapper[4922]: I0930 00:03:57.825064 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.8250454019999998 podStartE2EDuration="3.825045402s" podCreationTimestamp="2025-09-30 00:03:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:03:57.817833144 +0000 UTC m=+5842.128121957" watchObservedRunningTime="2025-09-30 00:03:57.825045402 +0000 UTC m=+5842.135334215" Sep 30 00:03:59 crc kubenswrapper[4922]: I0930 00:03:59.894570 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 00:04:00 crc kubenswrapper[4922]: I0930 00:04:00.139490 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 00:04:05 crc kubenswrapper[4922]: I0930 00:04:05.390043 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 00:04:21 crc kubenswrapper[4922]: I0930 00:04:21.052782 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-m57px"] Sep 30 00:04:21 crc kubenswrapper[4922]: I0930 00:04:21.058985 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-m57px"] Sep 30 00:04:22 crc kubenswrapper[4922]: I0930 00:04:22.438577 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad146be8-a637-4ae4-b353-4cb9e36b4d74" path="/var/lib/kubelet/pods/ad146be8-a637-4ae4-b353-4cb9e36b4d74/volumes" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.650185 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5xjvh"] Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.657824 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.679669 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xjvh"] Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.817431 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-catalog-content\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.817510 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8696\" (UniqueName: \"kubernetes.io/projected/25c55198-7d39-4ee0-85fe-e6065f32d89b-kube-api-access-f8696\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.817595 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-utilities\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.919259 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-utilities\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.919550 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-catalog-content\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.919595 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8696\" (UniqueName: \"kubernetes.io/projected/25c55198-7d39-4ee0-85fe-e6065f32d89b-kube-api-access-f8696\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.919858 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-utilities\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.919871 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-catalog-content\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.940271 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f8696\" (UniqueName: \"kubernetes.io/projected/25c55198-7d39-4ee0-85fe-e6065f32d89b-kube-api-access-f8696\") pod \"redhat-operators-5xjvh\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:27 crc kubenswrapper[4922]: I0930 00:04:27.984378 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:28 crc kubenswrapper[4922]: I0930 00:04:28.513520 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5xjvh"] Sep 30 00:04:29 crc kubenswrapper[4922]: I0930 00:04:29.120984 4922 generic.go:334] "Generic (PLEG): container finished" podID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerID="1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde" exitCode=0 Sep 30 00:04:29 crc kubenswrapper[4922]: I0930 00:04:29.121029 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xjvh" event={"ID":"25c55198-7d39-4ee0-85fe-e6065f32d89b","Type":"ContainerDied","Data":"1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde"} Sep 30 00:04:29 crc kubenswrapper[4922]: I0930 00:04:29.121055 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xjvh" event={"ID":"25c55198-7d39-4ee0-85fe-e6065f32d89b","Type":"ContainerStarted","Data":"097de548fd0b449df6a6bdd520c4f590d7bbfccecf772fe2d6f6375467fffd71"} Sep 30 00:04:31 crc kubenswrapper[4922]: I0930 00:04:31.049013 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-f160-account-create-k5ngt"] Sep 30 00:04:31 crc kubenswrapper[4922]: I0930 00:04:31.060087 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-f160-account-create-k5ngt"] Sep 30 00:04:31 crc kubenswrapper[4922]: I0930 00:04:31.155129 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xjvh" event={"ID":"25c55198-7d39-4ee0-85fe-e6065f32d89b","Type":"ContainerStarted","Data":"f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015"} Sep 30 00:04:32 crc kubenswrapper[4922]: I0930 00:04:32.443539 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b79e6c5a-7a81-448e-babc-d7d764ad0650" path="/var/lib/kubelet/pods/b79e6c5a-7a81-448e-babc-d7d764ad0650/volumes" Sep 30 00:04:33 crc kubenswrapper[4922]: I0930 00:04:33.189030 4922 generic.go:334] "Generic (PLEG): container finished" podID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerID="f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015" exitCode=0 Sep 30 00:04:33 crc kubenswrapper[4922]: I0930 00:04:33.189089 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xjvh" event={"ID":"25c55198-7d39-4ee0-85fe-e6065f32d89b","Type":"ContainerDied","Data":"f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015"} Sep 30 00:04:34 crc kubenswrapper[4922]: I0930 00:04:34.200937 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xjvh" event={"ID":"25c55198-7d39-4ee0-85fe-e6065f32d89b","Type":"ContainerStarted","Data":"377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581"} Sep 30 00:04:34 crc kubenswrapper[4922]: I0930 00:04:34.225710 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5xjvh" podStartSLOduration=2.450934292 
podStartE2EDuration="7.225695511s" podCreationTimestamp="2025-09-30 00:04:27 +0000 UTC" firstStartedPulling="2025-09-30 00:04:29.123732559 +0000 UTC m=+5873.434021372" lastFinishedPulling="2025-09-30 00:04:33.898493788 +0000 UTC m=+5878.208782591" observedRunningTime="2025-09-30 00:04:34.222121203 +0000 UTC m=+5878.532410036" watchObservedRunningTime="2025-09-30 00:04:34.225695511 +0000 UTC m=+5878.535984314" Sep 30 00:04:37 crc kubenswrapper[4922]: I0930 00:04:37.985488 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:37 crc kubenswrapper[4922]: I0930 00:04:37.986104 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:38 crc kubenswrapper[4922]: I0930 00:04:38.033901 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-v4vgm"] Sep 30 00:04:38 crc kubenswrapper[4922]: I0930 00:04:38.045996 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-v4vgm"] Sep 30 00:04:38 crc kubenswrapper[4922]: I0930 00:04:38.432942 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b85e9b7-babd-4976-899c-58b5cf1b4551" path="/var/lib/kubelet/pods/8b85e9b7-babd-4976-899c-58b5cf1b4551/volumes" Sep 30 00:04:39 crc kubenswrapper[4922]: I0930 00:04:39.067513 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-5xjvh" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="registry-server" probeResult="failure" output=< Sep 30 00:04:39 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:04:39 crc kubenswrapper[4922]: > Sep 30 00:04:48 crc kubenswrapper[4922]: I0930 00:04:48.075620 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:48 crc kubenswrapper[4922]: I0930 00:04:48.157816 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:48 crc kubenswrapper[4922]: I0930 00:04:48.325070 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5xjvh"] Sep 30 00:04:49 crc kubenswrapper[4922]: I0930 00:04:49.366050 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5xjvh" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="registry-server" containerID="cri-o://377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581" gracePeriod=2 Sep 30 00:04:49 crc kubenswrapper[4922]: I0930 00:04:49.804920 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:49 crc kubenswrapper[4922]: I0930 00:04:49.964564 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8696\" (UniqueName: \"kubernetes.io/projected/25c55198-7d39-4ee0-85fe-e6065f32d89b-kube-api-access-f8696\") pod \"25c55198-7d39-4ee0-85fe-e6065f32d89b\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " Sep 30 00:04:49 crc kubenswrapper[4922]: I0930 00:04:49.965551 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-utilities\") pod \"25c55198-7d39-4ee0-85fe-e6065f32d89b\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " Sep 30 00:04:49 crc kubenswrapper[4922]: I0930 00:04:49.965611 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-catalog-content\") pod \"25c55198-7d39-4ee0-85fe-e6065f32d89b\" (UID: \"25c55198-7d39-4ee0-85fe-e6065f32d89b\") " Sep 30 00:04:49 crc kubenswrapper[4922]: I0930 00:04:49.966972 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-utilities" (OuterVolumeSpecName: "utilities") pod "25c55198-7d39-4ee0-85fe-e6065f32d89b" (UID: "25c55198-7d39-4ee0-85fe-e6065f32d89b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:04:49 crc kubenswrapper[4922]: I0930 00:04:49.973568 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c55198-7d39-4ee0-85fe-e6065f32d89b-kube-api-access-f8696" (OuterVolumeSpecName: "kube-api-access-f8696") pod "25c55198-7d39-4ee0-85fe-e6065f32d89b" (UID: "25c55198-7d39-4ee0-85fe-e6065f32d89b"). InnerVolumeSpecName "kube-api-access-f8696". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.068902 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8696\" (UniqueName: \"kubernetes.io/projected/25c55198-7d39-4ee0-85fe-e6065f32d89b-kube-api-access-f8696\") on node \"crc\" DevicePath \"\"" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.068989 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.073996 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25c55198-7d39-4ee0-85fe-e6065f32d89b" (UID: "25c55198-7d39-4ee0-85fe-e6065f32d89b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.170581 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25c55198-7d39-4ee0-85fe-e6065f32d89b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.379716 4922 generic.go:334] "Generic (PLEG): container finished" podID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerID="377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581" exitCode=0 Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.379803 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5xjvh" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.379785 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xjvh" event={"ID":"25c55198-7d39-4ee0-85fe-e6065f32d89b","Type":"ContainerDied","Data":"377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581"} Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.379891 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5xjvh" event={"ID":"25c55198-7d39-4ee0-85fe-e6065f32d89b","Type":"ContainerDied","Data":"097de548fd0b449df6a6bdd520c4f590d7bbfccecf772fe2d6f6375467fffd71"} Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.379926 4922 scope.go:117] "RemoveContainer" containerID="377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.413375 4922 scope.go:117] "RemoveContainer" containerID="f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.440058 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5xjvh"] Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.449784 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5xjvh"] Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.454178 4922 scope.go:117] "RemoveContainer" containerID="1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.506885 4922 scope.go:117] "RemoveContainer" containerID="377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581" Sep 30 00:04:50 crc kubenswrapper[4922]: E0930 00:04:50.507614 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581\": container with ID starting with 377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581 not found: ID does not exist" containerID="377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.507652 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581"} err="failed to get container status \"377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581\": rpc error: code = NotFound desc = could not find container \"377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581\": container with ID starting with 377d2ba17c550bcd32e629733a9b3a307be6ae8ff4642f79b038a6ddac482581 not found: ID does not exist" Sep 30 00:04:50 crc 
kubenswrapper[4922]: I0930 00:04:50.507676 4922 scope.go:117] "RemoveContainer" containerID="f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015" Sep 30 00:04:50 crc kubenswrapper[4922]: E0930 00:04:50.508271 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015\": container with ID starting with f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015 not found: ID does not exist" containerID="f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.508301 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015"} err="failed to get container status \"f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015\": rpc error: code = NotFound desc = could not find container \"f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015\": container with ID starting with f22fd4769de020d7340bfff5333b0da725100e99e5cf1a5b8b6ce2bac7e6a015 not found: ID does not exist" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.508317 4922 scope.go:117] "RemoveContainer" containerID="1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde" Sep 30 00:04:50 crc kubenswrapper[4922]: E0930 00:04:50.508918 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde\": container with ID starting with 1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde not found: ID does not exist" containerID="1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde" Sep 30 00:04:50 crc kubenswrapper[4922]: I0930 00:04:50.508982 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde"} err="failed to get container status \"1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde\": rpc error: code = NotFound desc = could not find container \"1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde\": container with ID starting with 1b272432dd12717089ff42630592c167f8bae5e3a18968dddfdaa0e69f70ffde not found: ID does not exist" Sep 30 00:04:51 crc kubenswrapper[4922]: I0930 00:04:51.063100 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-plwj5"] Sep 30 00:04:51 crc kubenswrapper[4922]: I0930 00:04:51.076796 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-plwj5"] Sep 30 00:04:52 crc kubenswrapper[4922]: I0930 00:04:52.441323 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" path="/var/lib/kubelet/pods/25c55198-7d39-4ee0-85fe-e6065f32d89b/volumes" Sep 30 00:04:52 crc kubenswrapper[4922]: I0930 00:04:52.442327 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5c74b25-a885-423b-9383-711519104495" path="/var/lib/kubelet/pods/c5c74b25-a885-423b-9383-711519104495/volumes" Sep 30 00:04:53 crc kubenswrapper[4922]: I0930 00:04:53.446197 4922 scope.go:117] "RemoveContainer" containerID="07de248981f6e77a2883923993ffab71ac32dde645b81bbf70edb0ad1eda6425" Sep 30 00:04:53 crc kubenswrapper[4922]: I0930 00:04:53.493210 4922 scope.go:117] "RemoveContainer" 
containerID="8508fd4ead24194ad605678d39e01c7200e6598497999c3ac33d8a39531b199f" Sep 30 00:04:53 crc kubenswrapper[4922]: I0930 00:04:53.566650 4922 scope.go:117] "RemoveContainer" containerID="2fb38785f8873dac286f64519819801e58b3aed0e65f7442c98521139007a87d" Sep 30 00:04:53 crc kubenswrapper[4922]: I0930 00:04:53.638426 4922 scope.go:117] "RemoveContainer" containerID="0e4f104bc48fe041d1a0cbe1d669cfa1eeb8f0d3f288bd5f805117e16c872d5e" Sep 30 00:05:28 crc kubenswrapper[4922]: I0930 00:05:28.912690 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:05:28 crc kubenswrapper[4922]: I0930 00:05:28.913678 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.641210 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-wcjrw"] Sep 30 00:05:47 crc kubenswrapper[4922]: E0930 00:05:47.642304 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="registry-server" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.642324 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="registry-server" Sep 30 00:05:47 crc kubenswrapper[4922]: E0930 00:05:47.642347 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="extract-content" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.642356 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="extract-content" Sep 30 00:05:47 crc kubenswrapper[4922]: E0930 00:05:47.642378 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="extract-utilities" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.642412 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="extract-utilities" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.642675 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="25c55198-7d39-4ee0-85fe-e6065f32d89b" containerName="registry-server" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.643385 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.646976 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-x9mc5" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.647338 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.668634 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-h2qhf"] Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.671087 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.690452 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wcjrw"] Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.697993 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-h2qhf"] Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.804892 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f446bafd-364e-4088-a43d-9b4f21910312-scripts\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.804948 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzq54\" (UniqueName: \"kubernetes.io/projected/f446bafd-364e-4088-a43d-9b4f21910312-kube-api-access-pzq54\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805089 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-log\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805120 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-run\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805205 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-etc-ovs\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805263 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-log-ovn\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805407 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-run-ovn\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805480 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-lib\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805517 
4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-run\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805569 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rm7n\" (UniqueName: \"kubernetes.io/projected/0137d8ea-c9d8-442f-9a87-c827e0ad241a-kube-api-access-5rm7n\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.805620 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0137d8ea-c9d8-442f-9a87-c827e0ad241a-scripts\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907708 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rm7n\" (UniqueName: \"kubernetes.io/projected/0137d8ea-c9d8-442f-9a87-c827e0ad241a-kube-api-access-5rm7n\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907761 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0137d8ea-c9d8-442f-9a87-c827e0ad241a-scripts\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907826 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f446bafd-364e-4088-a43d-9b4f21910312-scripts\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907852 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzq54\" (UniqueName: \"kubernetes.io/projected/f446bafd-364e-4088-a43d-9b4f21910312-kube-api-access-pzq54\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907876 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-log\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907892 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-run\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907931 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: 
\"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-etc-ovs\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.907955 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-log-ovn\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908007 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-run-ovn\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908058 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-lib\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908082 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-run\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908290 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-run-ovn\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908292 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-run\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908292 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-etc-ovs\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908357 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-lib\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908292 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-log\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908366 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0137d8ea-c9d8-442f-9a87-c827e0ad241a-var-log-ovn\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.908349 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f446bafd-364e-4088-a43d-9b4f21910312-var-run\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.910072 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0137d8ea-c9d8-442f-9a87-c827e0ad241a-scripts\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.912112 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f446bafd-364e-4088-a43d-9b4f21910312-scripts\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.927724 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzq54\" (UniqueName: \"kubernetes.io/projected/f446bafd-364e-4088-a43d-9b4f21910312-kube-api-access-pzq54\") pod \"ovn-controller-ovs-h2qhf\" (UID: \"f446bafd-364e-4088-a43d-9b4f21910312\") " pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.935204 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rm7n\" (UniqueName: \"kubernetes.io/projected/0137d8ea-c9d8-442f-9a87-c827e0ad241a-kube-api-access-5rm7n\") pod \"ovn-controller-wcjrw\" (UID: \"0137d8ea-c9d8-442f-9a87-c827e0ad241a\") " pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.966523 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:47 crc kubenswrapper[4922]: I0930 00:05:47.985654 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:48 crc kubenswrapper[4922]: I0930 00:05:48.451830 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wcjrw"] Sep 30 00:05:48 crc kubenswrapper[4922]: I0930 00:05:48.842493 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-h2qhf"] Sep 30 00:05:48 crc kubenswrapper[4922]: W0930 00:05:48.845504 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf446bafd_364e_4088_a43d_9b4f21910312.slice/crio-fb8904e24ed4f64822205c066792f24a5400e31066bf81135b3e7ce72e4818d2 WatchSource:0}: Error finding container fb8904e24ed4f64822205c066792f24a5400e31066bf81135b3e7ce72e4818d2: Status 404 returned error can't find the container with id fb8904e24ed4f64822205c066792f24a5400e31066bf81135b3e7ce72e4818d2 Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.044616 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wcjrw" event={"ID":"0137d8ea-c9d8-442f-9a87-c827e0ad241a","Type":"ContainerStarted","Data":"bd5a90336a6d1e4f930cba4d66c269479762efd7c7f9732ed9a447ebc7501a7a"} Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.044668 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wcjrw" event={"ID":"0137d8ea-c9d8-442f-9a87-c827e0ad241a","Type":"ContainerStarted","Data":"9c5a106b80725ad7267d89040badd6f83dc79de79f133d1fbf0c80a8efecdf49"} Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.044735 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-wcjrw" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.047631 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h2qhf" event={"ID":"f446bafd-364e-4088-a43d-9b4f21910312","Type":"ContainerStarted","Data":"fb8904e24ed4f64822205c066792f24a5400e31066bf81135b3e7ce72e4818d2"} Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.068472 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-wcjrw" podStartSLOduration=2.068453859 podStartE2EDuration="2.068453859s" podCreationTimestamp="2025-09-30 00:05:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:05:49.06606463 +0000 UTC m=+5953.376353443" watchObservedRunningTime="2025-09-30 00:05:49.068453859 +0000 UTC m=+5953.378742692" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.263152 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-xqs6b"] Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.265676 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.269781 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.303113 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-xqs6b"] Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.346433 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-ovn-rundir\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.346539 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-ovs-rundir\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.346656 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-config\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.346802 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmpx7\" (UniqueName: \"kubernetes.io/projected/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-kube-api-access-qmpx7\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.448459 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-ovn-rundir\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.448523 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-ovs-rundir\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.448573 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-config\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.448644 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmpx7\" (UniqueName: \"kubernetes.io/projected/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-kube-api-access-qmpx7\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " 
pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.448876 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-ovs-rundir\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.448963 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-ovn-rundir\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.449847 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-config\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.468368 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmpx7\" (UniqueName: \"kubernetes.io/projected/f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5-kube-api-access-qmpx7\") pod \"ovn-controller-metrics-xqs6b\" (UID: \"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5\") " pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:49 crc kubenswrapper[4922]: I0930 00:05:49.588951 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-xqs6b" Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.057036 4922 generic.go:334] "Generic (PLEG): container finished" podID="f446bafd-364e-4088-a43d-9b4f21910312" containerID="560e6fd4ab043b3a1dec71fcd36efa347a7409231a70b64bc7e376d898dfc19a" exitCode=0 Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.058309 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h2qhf" event={"ID":"f446bafd-364e-4088-a43d-9b4f21910312","Type":"ContainerDied","Data":"560e6fd4ab043b3a1dec71fcd36efa347a7409231a70b64bc7e376d898dfc19a"} Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.072543 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-xqs6b"] Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.682257 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-jf6bs"] Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.684185 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-jf6bs" Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.693226 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-jf6bs"] Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.782571 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld6pj\" (UniqueName: \"kubernetes.io/projected/9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9-kube-api-access-ld6pj\") pod \"octavia-db-create-jf6bs\" (UID: \"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9\") " pod="openstack/octavia-db-create-jf6bs" Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.884535 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld6pj\" (UniqueName: \"kubernetes.io/projected/9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9-kube-api-access-ld6pj\") pod \"octavia-db-create-jf6bs\" (UID: \"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9\") " pod="openstack/octavia-db-create-jf6bs" Sep 30 00:05:50 crc kubenswrapper[4922]: I0930 00:05:50.905867 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld6pj\" (UniqueName: \"kubernetes.io/projected/9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9-kube-api-access-ld6pj\") pod \"octavia-db-create-jf6bs\" (UID: \"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9\") " pod="openstack/octavia-db-create-jf6bs" Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.001632 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-jf6bs" Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.076941 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h2qhf" event={"ID":"f446bafd-364e-4088-a43d-9b4f21910312","Type":"ContainerStarted","Data":"529c566ad45597af7755529fe257326cb281cd0eceed1e70c43584f96357235a"} Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.077337 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.077350 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h2qhf" event={"ID":"f446bafd-364e-4088-a43d-9b4f21910312","Type":"ContainerStarted","Data":"ac95416640ed4ee0e878b8c15e9e6c53a9e81a642231e6ea8823976d96cff83c"} Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.077362 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.084944 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-xqs6b" event={"ID":"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5","Type":"ContainerStarted","Data":"93335a3a487a84c1e67c14efbd43d0bd46534845ed9b1a09cf4d482b04c96285"} Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.084976 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-xqs6b" event={"ID":"f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5","Type":"ContainerStarted","Data":"f4ea6b1372c875819d9bd86d025d7aa80e8491238604f41612a8d6aa6f156c21"} Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.101914 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-h2qhf" podStartSLOduration=4.10189847 podStartE2EDuration="4.10189847s" podCreationTimestamp="2025-09-30 00:05:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:05:51.098875376 +0000 UTC m=+5955.409164189" watchObservedRunningTime="2025-09-30 00:05:51.10189847 +0000 UTC m=+5955.412187283" Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.131296 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-xqs6b" podStartSLOduration=2.131279565 podStartE2EDuration="2.131279565s" podCreationTimestamp="2025-09-30 00:05:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:05:51.126577679 +0000 UTC m=+5955.436866482" watchObservedRunningTime="2025-09-30 00:05:51.131279565 +0000 UTC m=+5955.441568378" Sep 30 00:05:51 crc kubenswrapper[4922]: I0930 00:05:51.536865 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-jf6bs"] Sep 30 00:05:52 crc kubenswrapper[4922]: I0930 00:05:52.095450 4922 generic.go:334] "Generic (PLEG): container finished" podID="9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9" containerID="75c33abaa1c299d844021f0b2769504ff502ef71909f3243d882bb42b4967184" exitCode=0 Sep 30 00:05:52 crc kubenswrapper[4922]: I0930 00:05:52.095505 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-jf6bs" event={"ID":"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9","Type":"ContainerDied","Data":"75c33abaa1c299d844021f0b2769504ff502ef71909f3243d882bb42b4967184"} Sep 30 00:05:52 crc kubenswrapper[4922]: I0930 00:05:52.095576 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-jf6bs" event={"ID":"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9","Type":"ContainerStarted","Data":"d0f10027fbf083fc6fe5b74dd03f6043d414a67d3ba0a1e055df3ad8dca2f2c9"} Sep 30 00:05:53 crc kubenswrapper[4922]: I0930 00:05:53.560731 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-jf6bs" Sep 30 00:05:53 crc kubenswrapper[4922]: I0930 00:05:53.650918 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld6pj\" (UniqueName: \"kubernetes.io/projected/9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9-kube-api-access-ld6pj\") pod \"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9\" (UID: \"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9\") " Sep 30 00:05:53 crc kubenswrapper[4922]: I0930 00:05:53.660564 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9-kube-api-access-ld6pj" (OuterVolumeSpecName: "kube-api-access-ld6pj") pod "9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9" (UID: "9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9"). InnerVolumeSpecName "kube-api-access-ld6pj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:05:53 crc kubenswrapper[4922]: I0930 00:05:53.752145 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld6pj\" (UniqueName: \"kubernetes.io/projected/9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9-kube-api-access-ld6pj\") on node \"crc\" DevicePath \"\"" Sep 30 00:05:54 crc kubenswrapper[4922]: I0930 00:05:54.119344 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-jf6bs" event={"ID":"9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9","Type":"ContainerDied","Data":"d0f10027fbf083fc6fe5b74dd03f6043d414a67d3ba0a1e055df3ad8dca2f2c9"} Sep 30 00:05:54 crc kubenswrapper[4922]: I0930 00:05:54.119411 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0f10027fbf083fc6fe5b74dd03f6043d414a67d3ba0a1e055df3ad8dca2f2c9" Sep 30 00:05:54 crc kubenswrapper[4922]: I0930 00:05:54.119423 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-jf6bs" Sep 30 00:05:58 crc kubenswrapper[4922]: I0930 00:05:58.912831 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:05:58 crc kubenswrapper[4922]: I0930 00:05:58.913435 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.716021 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-136c-account-create-cfqr8"] Sep 30 00:06:02 crc kubenswrapper[4922]: E0930 00:06:02.717567 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9" containerName="mariadb-database-create" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.717597 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9" containerName="mariadb-database-create" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.718000 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9" containerName="mariadb-database-create" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.719231 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-136c-account-create-cfqr8" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.721910 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.732104 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-136c-account-create-cfqr8"] Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.848112 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xtlh\" (UniqueName: \"kubernetes.io/projected/3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f-kube-api-access-9xtlh\") pod \"octavia-136c-account-create-cfqr8\" (UID: \"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f\") " pod="openstack/octavia-136c-account-create-cfqr8" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.950773 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xtlh\" (UniqueName: \"kubernetes.io/projected/3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f-kube-api-access-9xtlh\") pod \"octavia-136c-account-create-cfqr8\" (UID: \"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f\") " pod="openstack/octavia-136c-account-create-cfqr8" Sep 30 00:06:02 crc kubenswrapper[4922]: I0930 00:06:02.984196 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xtlh\" (UniqueName: \"kubernetes.io/projected/3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f-kube-api-access-9xtlh\") pod \"octavia-136c-account-create-cfqr8\" (UID: \"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f\") " pod="openstack/octavia-136c-account-create-cfqr8" Sep 30 00:06:03 crc kubenswrapper[4922]: I0930 00:06:03.054964 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-136c-account-create-cfqr8" Sep 30 00:06:03 crc kubenswrapper[4922]: I0930 00:06:03.629755 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-136c-account-create-cfqr8"] Sep 30 00:06:03 crc kubenswrapper[4922]: W0930 00:06:03.643412 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e6dd6f0_e7fb_4ab5_8e50_6937937e2d4f.slice/crio-30c93dcf5493b4b07cc634c8bb265c40bca3ef30cd3aa50843d4bc79732e02dc WatchSource:0}: Error finding container 30c93dcf5493b4b07cc634c8bb265c40bca3ef30cd3aa50843d4bc79732e02dc: Status 404 returned error can't find the container with id 30c93dcf5493b4b07cc634c8bb265c40bca3ef30cd3aa50843d4bc79732e02dc Sep 30 00:06:04 crc kubenswrapper[4922]: I0930 00:06:04.242601 4922 generic.go:334] "Generic (PLEG): container finished" podID="3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f" containerID="4bbd87d9b98c36c6c9a16f5094bc99997802869a4a8b8fcd57b9957854e31e47" exitCode=0 Sep 30 00:06:04 crc kubenswrapper[4922]: I0930 00:06:04.242692 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-136c-account-create-cfqr8" event={"ID":"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f","Type":"ContainerDied","Data":"4bbd87d9b98c36c6c9a16f5094bc99997802869a4a8b8fcd57b9957854e31e47"} Sep 30 00:06:04 crc kubenswrapper[4922]: I0930 00:06:04.242996 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-136c-account-create-cfqr8" event={"ID":"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f","Type":"ContainerStarted","Data":"30c93dcf5493b4b07cc634c8bb265c40bca3ef30cd3aa50843d4bc79732e02dc"} Sep 30 00:06:05 crc kubenswrapper[4922]: I0930 00:06:05.732228 4922 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack/octavia-136c-account-create-cfqr8" Sep 30 00:06:05 crc kubenswrapper[4922]: I0930 00:06:05.827497 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xtlh\" (UniqueName: \"kubernetes.io/projected/3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f-kube-api-access-9xtlh\") pod \"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f\" (UID: \"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f\") " Sep 30 00:06:05 crc kubenswrapper[4922]: I0930 00:06:05.836738 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f-kube-api-access-9xtlh" (OuterVolumeSpecName: "kube-api-access-9xtlh") pod "3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f" (UID: "3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f"). InnerVolumeSpecName "kube-api-access-9xtlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:06:05 crc kubenswrapper[4922]: I0930 00:06:05.933953 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xtlh\" (UniqueName: \"kubernetes.io/projected/3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f-kube-api-access-9xtlh\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:06 crc kubenswrapper[4922]: I0930 00:06:06.272785 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-136c-account-create-cfqr8" event={"ID":"3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f","Type":"ContainerDied","Data":"30c93dcf5493b4b07cc634c8bb265c40bca3ef30cd3aa50843d4bc79732e02dc"} Sep 30 00:06:06 crc kubenswrapper[4922]: I0930 00:06:06.273181 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30c93dcf5493b4b07cc634c8bb265c40bca3ef30cd3aa50843d4bc79732e02dc" Sep 30 00:06:06 crc kubenswrapper[4922]: I0930 00:06:06.272915 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-136c-account-create-cfqr8" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.531415 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-qcvft"] Sep 30 00:06:09 crc kubenswrapper[4922]: E0930 00:06:09.532652 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f" containerName="mariadb-account-create" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.532670 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f" containerName="mariadb-account-create" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.532917 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f" containerName="mariadb-account-create" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.533589 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-qcvft" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.605378 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-qcvft"] Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.616967 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m894\" (UniqueName: \"kubernetes.io/projected/63605a12-e656-4baa-8b39-208131d28a5c-kube-api-access-4m894\") pod \"octavia-persistence-db-create-qcvft\" (UID: \"63605a12-e656-4baa-8b39-208131d28a5c\") " pod="openstack/octavia-persistence-db-create-qcvft" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.719639 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m894\" (UniqueName: \"kubernetes.io/projected/63605a12-e656-4baa-8b39-208131d28a5c-kube-api-access-4m894\") pod \"octavia-persistence-db-create-qcvft\" (UID: \"63605a12-e656-4baa-8b39-208131d28a5c\") " pod="openstack/octavia-persistence-db-create-qcvft" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.752257 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m894\" (UniqueName: \"kubernetes.io/projected/63605a12-e656-4baa-8b39-208131d28a5c-kube-api-access-4m894\") pod \"octavia-persistence-db-create-qcvft\" (UID: \"63605a12-e656-4baa-8b39-208131d28a5c\") " pod="openstack/octavia-persistence-db-create-qcvft" Sep 30 00:06:09 crc kubenswrapper[4922]: I0930 00:06:09.880143 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-qcvft" Sep 30 00:06:10 crc kubenswrapper[4922]: I0930 00:06:10.150707 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-qcvft"] Sep 30 00:06:10 crc kubenswrapper[4922]: W0930 00:06:10.157701 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63605a12_e656_4baa_8b39_208131d28a5c.slice/crio-8185a25ccc9fa1b07ee20620843b0a6d04a5d7231e3a5a7253df010e759cd746 WatchSource:0}: Error finding container 8185a25ccc9fa1b07ee20620843b0a6d04a5d7231e3a5a7253df010e759cd746: Status 404 returned error can't find the container with id 8185a25ccc9fa1b07ee20620843b0a6d04a5d7231e3a5a7253df010e759cd746 Sep 30 00:06:10 crc kubenswrapper[4922]: I0930 00:06:10.319204 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-qcvft" event={"ID":"63605a12-e656-4baa-8b39-208131d28a5c","Type":"ContainerStarted","Data":"d3dce7fa9d953efd0f0792ae18ad625b1dae69a7f64b8c560f4517585176a232"} Sep 30 00:06:10 crc kubenswrapper[4922]: I0930 00:06:10.319257 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-qcvft" event={"ID":"63605a12-e656-4baa-8b39-208131d28a5c","Type":"ContainerStarted","Data":"8185a25ccc9fa1b07ee20620843b0a6d04a5d7231e3a5a7253df010e759cd746"} Sep 30 00:06:10 crc kubenswrapper[4922]: I0930 00:06:10.348157 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-persistence-db-create-qcvft" podStartSLOduration=1.348138817 podStartE2EDuration="1.348138817s" podCreationTimestamp="2025-09-30 00:06:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:06:10.341359369 +0000 UTC m=+5974.651648182" 
watchObservedRunningTime="2025-09-30 00:06:10.348138817 +0000 UTC m=+5974.658427630" Sep 30 00:06:11 crc kubenswrapper[4922]: I0930 00:06:11.331302 4922 generic.go:334] "Generic (PLEG): container finished" podID="63605a12-e656-4baa-8b39-208131d28a5c" containerID="d3dce7fa9d953efd0f0792ae18ad625b1dae69a7f64b8c560f4517585176a232" exitCode=0 Sep 30 00:06:11 crc kubenswrapper[4922]: I0930 00:06:11.331467 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-qcvft" event={"ID":"63605a12-e656-4baa-8b39-208131d28a5c","Type":"ContainerDied","Data":"d3dce7fa9d953efd0f0792ae18ad625b1dae69a7f64b8c560f4517585176a232"} Sep 30 00:06:12 crc kubenswrapper[4922]: I0930 00:06:12.767828 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-qcvft" Sep 30 00:06:12 crc kubenswrapper[4922]: I0930 00:06:12.776424 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m894\" (UniqueName: \"kubernetes.io/projected/63605a12-e656-4baa-8b39-208131d28a5c-kube-api-access-4m894\") pod \"63605a12-e656-4baa-8b39-208131d28a5c\" (UID: \"63605a12-e656-4baa-8b39-208131d28a5c\") " Sep 30 00:06:12 crc kubenswrapper[4922]: I0930 00:06:12.787695 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63605a12-e656-4baa-8b39-208131d28a5c-kube-api-access-4m894" (OuterVolumeSpecName: "kube-api-access-4m894") pod "63605a12-e656-4baa-8b39-208131d28a5c" (UID: "63605a12-e656-4baa-8b39-208131d28a5c"). InnerVolumeSpecName "kube-api-access-4m894". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:06:12 crc kubenswrapper[4922]: I0930 00:06:12.879337 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m894\" (UniqueName: \"kubernetes.io/projected/63605a12-e656-4baa-8b39-208131d28a5c-kube-api-access-4m894\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:13 crc kubenswrapper[4922]: I0930 00:06:13.357471 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-qcvft" event={"ID":"63605a12-e656-4baa-8b39-208131d28a5c","Type":"ContainerDied","Data":"8185a25ccc9fa1b07ee20620843b0a6d04a5d7231e3a5a7253df010e759cd746"} Sep 30 00:06:13 crc kubenswrapper[4922]: I0930 00:06:13.357819 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8185a25ccc9fa1b07ee20620843b0a6d04a5d7231e3a5a7253df010e759cd746" Sep 30 00:06:13 crc kubenswrapper[4922]: I0930 00:06:13.357544 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-qcvft" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.717272 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-0333-account-create-hdnzt"] Sep 30 00:06:20 crc kubenswrapper[4922]: E0930 00:06:20.718328 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63605a12-e656-4baa-8b39-208131d28a5c" containerName="mariadb-database-create" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.718343 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="63605a12-e656-4baa-8b39-208131d28a5c" containerName="mariadb-database-create" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.718633 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="63605a12-e656-4baa-8b39-208131d28a5c" containerName="mariadb-database-create" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.719503 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-0333-account-create-hdnzt" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.721634 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.747988 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-0333-account-create-hdnzt"] Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.856147 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg2pg\" (UniqueName: \"kubernetes.io/projected/3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c-kube-api-access-pg2pg\") pod \"octavia-0333-account-create-hdnzt\" (UID: \"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c\") " pod="openstack/octavia-0333-account-create-hdnzt" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.958116 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg2pg\" (UniqueName: \"kubernetes.io/projected/3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c-kube-api-access-pg2pg\") pod \"octavia-0333-account-create-hdnzt\" (UID: \"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c\") " pod="openstack/octavia-0333-account-create-hdnzt" Sep 30 00:06:20 crc kubenswrapper[4922]: I0930 00:06:20.990781 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg2pg\" (UniqueName: \"kubernetes.io/projected/3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c-kube-api-access-pg2pg\") pod \"octavia-0333-account-create-hdnzt\" (UID: \"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c\") " pod="openstack/octavia-0333-account-create-hdnzt" Sep 30 00:06:21 crc kubenswrapper[4922]: I0930 00:06:21.048078 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-0333-account-create-hdnzt" Sep 30 00:06:21 crc kubenswrapper[4922]: I0930 00:06:21.507894 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-0333-account-create-hdnzt"] Sep 30 00:06:22 crc kubenswrapper[4922]: I0930 00:06:22.457563 4922 generic.go:334] "Generic (PLEG): container finished" podID="3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c" containerID="a52b1c46aa548d742031368e23b2372a0b981271573ed914d972c003ccecb639" exitCode=0 Sep 30 00:06:22 crc kubenswrapper[4922]: I0930 00:06:22.457617 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-0333-account-create-hdnzt" event={"ID":"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c","Type":"ContainerDied","Data":"a52b1c46aa548d742031368e23b2372a0b981271573ed914d972c003ccecb639"} Sep 30 00:06:22 crc kubenswrapper[4922]: I0930 00:06:22.457647 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-0333-account-create-hdnzt" event={"ID":"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c","Type":"ContainerStarted","Data":"cf23f610ff2644d6b5b702b94b4ddb367a20b381bb9d3c0f2a4bbb6d716ee66a"} Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.028821 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-wcjrw" podUID="0137d8ea-c9d8-442f-9a87-c827e0ad241a" containerName="ovn-controller" probeResult="failure" output=< Sep 30 00:06:23 crc kubenswrapper[4922]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 00:06:23 crc kubenswrapper[4922]: > Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.044939 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.048450 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-h2qhf" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.242151 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-wcjrw-config-9zg4h"] Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.270931 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wcjrw-config-9zg4h"] Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.271074 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.290225 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.407915 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-scripts\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.407964 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run-ovn\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.408139 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-log-ovn\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.408291 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ljrw\" (UniqueName: \"kubernetes.io/projected/c3fc7cc8-759e-423c-9794-4bb126fdad46-kube-api-access-7ljrw\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.408543 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.408795 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-additional-scripts\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.510447 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-log-ovn\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.510610 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ljrw\" (UniqueName: \"kubernetes.io/projected/c3fc7cc8-759e-423c-9794-4bb126fdad46-kube-api-access-7ljrw\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 
00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.510659 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.510867 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-additional-scripts\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.511018 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.511160 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-scripts\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.511221 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run-ovn\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.511932 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-log-ovn\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.511934 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run-ovn\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.512249 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-additional-scripts\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.513933 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-scripts\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.532254 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ljrw\" (UniqueName: \"kubernetes.io/projected/c3fc7cc8-759e-423c-9794-4bb126fdad46-kube-api-access-7ljrw\") pod \"ovn-controller-wcjrw-config-9zg4h\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.599000 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.810051 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-0333-account-create-hdnzt" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.816474 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg2pg\" (UniqueName: \"kubernetes.io/projected/3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c-kube-api-access-pg2pg\") pod \"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c\" (UID: \"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c\") " Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.828086 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c-kube-api-access-pg2pg" (OuterVolumeSpecName: "kube-api-access-pg2pg") pod "3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c" (UID: "3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c"). InnerVolumeSpecName "kube-api-access-pg2pg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:06:23 crc kubenswrapper[4922]: I0930 00:06:23.919137 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg2pg\" (UniqueName: \"kubernetes.io/projected/3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c-kube-api-access-pg2pg\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:24 crc kubenswrapper[4922]: I0930 00:06:24.100956 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-wcjrw-config-9zg4h"] Sep 30 00:06:24 crc kubenswrapper[4922]: I0930 00:06:24.481884 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-0333-account-create-hdnzt" event={"ID":"3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c","Type":"ContainerDied","Data":"cf23f610ff2644d6b5b702b94b4ddb367a20b381bb9d3c0f2a4bbb6d716ee66a"} Sep 30 00:06:24 crc kubenswrapper[4922]: I0930 00:06:24.481927 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf23f610ff2644d6b5b702b94b4ddb367a20b381bb9d3c0f2a4bbb6d716ee66a" Sep 30 00:06:24 crc kubenswrapper[4922]: I0930 00:06:24.481989 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-0333-account-create-hdnzt" Sep 30 00:06:24 crc kubenswrapper[4922]: I0930 00:06:24.484487 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wcjrw-config-9zg4h" event={"ID":"c3fc7cc8-759e-423c-9794-4bb126fdad46","Type":"ContainerStarted","Data":"ab4e10ed4824b61df7e5a6de8b6814e343adfd0e0cf6a9e15ec0223bf28e4884"} Sep 30 00:06:24 crc kubenswrapper[4922]: I0930 00:06:24.484526 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wcjrw-config-9zg4h" event={"ID":"c3fc7cc8-759e-423c-9794-4bb126fdad46","Type":"ContainerStarted","Data":"71830f614c01ec4c7a2c46d99e7a1852f47e64749f5aba0592c98a351e819851"} Sep 30 00:06:24 crc kubenswrapper[4922]: I0930 00:06:24.511090 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-wcjrw-config-9zg4h" podStartSLOduration=1.5110691809999999 podStartE2EDuration="1.511069181s" podCreationTimestamp="2025-09-30 00:06:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:06:24.508932578 +0000 UTC m=+5988.819221421" watchObservedRunningTime="2025-09-30 00:06:24.511069181 +0000 UTC m=+5988.821358004" Sep 30 00:06:25 crc kubenswrapper[4922]: I0930 00:06:25.500764 4922 generic.go:334] "Generic (PLEG): container finished" podID="c3fc7cc8-759e-423c-9794-4bb126fdad46" containerID="ab4e10ed4824b61df7e5a6de8b6814e343adfd0e0cf6a9e15ec0223bf28e4884" exitCode=0 Sep 30 00:06:25 crc kubenswrapper[4922]: I0930 00:06:25.500831 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wcjrw-config-9zg4h" event={"ID":"c3fc7cc8-759e-423c-9794-4bb126fdad46","Type":"ContainerDied","Data":"ab4e10ed4824b61df7e5a6de8b6814e343adfd0e0cf6a9e15ec0223bf28e4884"} Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.858745 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.981335 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-additional-scripts\") pod \"c3fc7cc8-759e-423c-9794-4bb126fdad46\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.981399 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ljrw\" (UniqueName: \"kubernetes.io/projected/c3fc7cc8-759e-423c-9794-4bb126fdad46-kube-api-access-7ljrw\") pod \"c3fc7cc8-759e-423c-9794-4bb126fdad46\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.981476 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-scripts\") pod \"c3fc7cc8-759e-423c-9794-4bb126fdad46\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.981556 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run-ovn\") pod \"c3fc7cc8-759e-423c-9794-4bb126fdad46\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.981588 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-log-ovn\") pod \"c3fc7cc8-759e-423c-9794-4bb126fdad46\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.981674 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run\") pod \"c3fc7cc8-759e-423c-9794-4bb126fdad46\" (UID: \"c3fc7cc8-759e-423c-9794-4bb126fdad46\") " Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.982070 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run" (OuterVolumeSpecName: "var-run") pod "c3fc7cc8-759e-423c-9794-4bb126fdad46" (UID: "c3fc7cc8-759e-423c-9794-4bb126fdad46"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.982635 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "c3fc7cc8-759e-423c-9794-4bb126fdad46" (UID: "c3fc7cc8-759e-423c-9794-4bb126fdad46"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.983081 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "c3fc7cc8-759e-423c-9794-4bb126fdad46" (UID: "c3fc7cc8-759e-423c-9794-4bb126fdad46"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.983110 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "c3fc7cc8-759e-423c-9794-4bb126fdad46" (UID: "c3fc7cc8-759e-423c-9794-4bb126fdad46"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.983365 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-scripts" (OuterVolumeSpecName: "scripts") pod "c3fc7cc8-759e-423c-9794-4bb126fdad46" (UID: "c3fc7cc8-759e-423c-9794-4bb126fdad46"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:06:26 crc kubenswrapper[4922]: I0930 00:06:26.988542 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3fc7cc8-759e-423c-9794-4bb126fdad46-kube-api-access-7ljrw" (OuterVolumeSpecName: "kube-api-access-7ljrw") pod "c3fc7cc8-759e-423c-9794-4bb126fdad46" (UID: "c3fc7cc8-759e-423c-9794-4bb126fdad46"). InnerVolumeSpecName "kube-api-access-7ljrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.083597 4922 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.083652 4922 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.083666 4922 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c3fc7cc8-759e-423c-9794-4bb126fdad46-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.083678 4922 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.083694 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ljrw\" (UniqueName: \"kubernetes.io/projected/c3fc7cc8-759e-423c-9794-4bb126fdad46-kube-api-access-7ljrw\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.083705 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c3fc7cc8-759e-423c-9794-4bb126fdad46-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.570030 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-wcjrw-config-9zg4h" event={"ID":"c3fc7cc8-759e-423c-9794-4bb126fdad46","Type":"ContainerDied","Data":"71830f614c01ec4c7a2c46d99e7a1852f47e64749f5aba0592c98a351e819851"} Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.570701 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71830f614c01ec4c7a2c46d99e7a1852f47e64749f5aba0592c98a351e819851" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 
00:06:27.570385 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-wcjrw-config-9zg4h" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.618577 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-wcjrw-config-9zg4h"] Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.644420 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-wcjrw-config-9zg4h"] Sep 30 00:06:27 crc kubenswrapper[4922]: E0930 00:06:27.734606 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3fc7cc8_759e_423c_9794_4bb126fdad46.slice/crio-71830f614c01ec4c7a2c46d99e7a1852f47e64749f5aba0592c98a351e819851\": RecentStats: unable to find data in memory cache]" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.850701 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-795867f848-rw9ph"] Sep 30 00:06:27 crc kubenswrapper[4922]: E0930 00:06:27.851215 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3fc7cc8-759e-423c-9794-4bb126fdad46" containerName="ovn-config" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.851238 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3fc7cc8-759e-423c-9794-4bb126fdad46" containerName="ovn-config" Sep 30 00:06:27 crc kubenswrapper[4922]: E0930 00:06:27.851263 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c" containerName="mariadb-account-create" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.851275 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c" containerName="mariadb-account-create" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.851520 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c" containerName="mariadb-account-create" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.851565 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3fc7cc8-759e-423c-9794-4bb126fdad46" containerName="ovn-config" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.853370 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.855553 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.855966 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-5kjwv" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.855970 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Sep 30 00:06:27 crc kubenswrapper[4922]: I0930 00:06:27.881085 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-795867f848-rw9ph"] Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.006203 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4378081b-61ce-4cb4-8363-efb6e00cfd5b-octavia-run\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.006309 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4378081b-61ce-4cb4-8363-efb6e00cfd5b-config-data-merged\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.006345 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-combined-ca-bundle\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.006366 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-scripts\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.006457 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-config-data\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.015927 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-wcjrw" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.108007 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4378081b-61ce-4cb4-8363-efb6e00cfd5b-config-data-merged\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.108064 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-combined-ca-bundle\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.108087 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-scripts\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.108131 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-config-data\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.108272 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4378081b-61ce-4cb4-8363-efb6e00cfd5b-octavia-run\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.108704 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/4378081b-61ce-4cb4-8363-efb6e00cfd5b-octavia-run\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.109082 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/4378081b-61ce-4cb4-8363-efb6e00cfd5b-config-data-merged\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.113472 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-scripts\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.114207 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-config-data\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.116326 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4378081b-61ce-4cb4-8363-efb6e00cfd5b-combined-ca-bundle\") pod \"octavia-api-795867f848-rw9ph\" (UID: \"4378081b-61ce-4cb4-8363-efb6e00cfd5b\") " pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.177040 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.433711 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3fc7cc8-759e-423c-9794-4bb126fdad46" path="/var/lib/kubelet/pods/c3fc7cc8-759e-423c-9794-4bb126fdad46/volumes" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.712115 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-795867f848-rw9ph"] Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.913058 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.913162 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.913244 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.914491 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:06:28 crc kubenswrapper[4922]: I0930 00:06:28.914605 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" gracePeriod=600 Sep 30 00:06:29 crc kubenswrapper[4922]: E0930 00:06:29.034174 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:06:29 crc kubenswrapper[4922]: I0930 00:06:29.588168 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-795867f848-rw9ph" event={"ID":"4378081b-61ce-4cb4-8363-efb6e00cfd5b","Type":"ContainerStarted","Data":"d8a1621c8998e1aad8135c9ab78d29059450f0e8b23c4fe5122ac5770fa5b7c2"} Sep 30 00:06:29 crc kubenswrapper[4922]: I0930 00:06:29.591508 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" exitCode=0 Sep 30 00:06:29 crc kubenswrapper[4922]: I0930 00:06:29.591555 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4"} Sep 30 00:06:29 crc kubenswrapper[4922]: I0930 00:06:29.591597 4922 scope.go:117] "RemoveContainer" containerID="86380e93f24d31c94f945d418afe09f1011e2f6445c628fb457b28215153e610" Sep 30 00:06:29 crc kubenswrapper[4922]: I0930 00:06:29.592173 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:06:29 crc kubenswrapper[4922]: E0930 00:06:29.592431 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:06:38 crc kubenswrapper[4922]: I0930 00:06:38.696729 4922 generic.go:334] "Generic (PLEG): container finished" podID="4378081b-61ce-4cb4-8363-efb6e00cfd5b" containerID="256293d17b2cf84159baffd71a0313d81cf5d27a8e5fe73505ff421b82de8cd7" exitCode=0 Sep 30 00:06:38 crc kubenswrapper[4922]: I0930 00:06:38.696831 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-795867f848-rw9ph" event={"ID":"4378081b-61ce-4cb4-8363-efb6e00cfd5b","Type":"ContainerDied","Data":"256293d17b2cf84159baffd71a0313d81cf5d27a8e5fe73505ff421b82de8cd7"} Sep 30 00:06:39 crc kubenswrapper[4922]: I0930 00:06:39.714763 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-795867f848-rw9ph" event={"ID":"4378081b-61ce-4cb4-8363-efb6e00cfd5b","Type":"ContainerStarted","Data":"5b0bd29ff8e628c774f678e18d7aa17a7857a26c17a8b5c9157994261fc5fc3e"} Sep 30 00:06:39 crc kubenswrapper[4922]: I0930 00:06:39.716612 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-795867f848-rw9ph" event={"ID":"4378081b-61ce-4cb4-8363-efb6e00cfd5b","Type":"ContainerStarted","Data":"5affa794eee1eeb5558f61fb3515f13fa52e2a647ee8ba5d6e05cd75c5beba7d"} Sep 30 00:06:39 crc kubenswrapper[4922]: I0930 00:06:39.717410 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:39 crc kubenswrapper[4922]: I0930 00:06:39.718406 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:39 crc kubenswrapper[4922]: I0930 00:06:39.740708 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-795867f848-rw9ph" podStartSLOduration=3.5350426820000003 podStartE2EDuration="12.740684864s" podCreationTimestamp="2025-09-30 00:06:27 +0000 UTC" firstStartedPulling="2025-09-30 00:06:28.717311602 +0000 UTC m=+5993.027600415" lastFinishedPulling="2025-09-30 00:06:37.922953774 +0000 UTC m=+6002.233242597" observedRunningTime="2025-09-30 00:06:39.73200608 +0000 UTC m=+6004.042294893" watchObservedRunningTime="2025-09-30 00:06:39.740684864 +0000 UTC m=+6004.050973677" Sep 30 00:06:44 crc kubenswrapper[4922]: I0930 00:06:44.425205 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:06:44 crc kubenswrapper[4922]: E0930 00:06:44.426003 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:06:47 crc kubenswrapper[4922]: I0930 00:06:47.276770 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.346562 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-56zpk"] Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.349320 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.351805 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.352503 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.354592 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.357375 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-56zpk"] Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.437654 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649e7de2-5622-4c10-81c4-7a600e720f94-scripts\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.437741 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649e7de2-5622-4c10-81c4-7a600e720f94-config-data\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.437842 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/649e7de2-5622-4c10-81c4-7a600e720f94-hm-ports\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.437924 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/649e7de2-5622-4c10-81c4-7a600e720f94-config-data-merged\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.459761 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-795867f848-rw9ph" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.540013 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/649e7de2-5622-4c10-81c4-7a600e720f94-config-data-merged\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " 
pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.540129 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649e7de2-5622-4c10-81c4-7a600e720f94-scripts\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.540159 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649e7de2-5622-4c10-81c4-7a600e720f94-config-data\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.540244 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/649e7de2-5622-4c10-81c4-7a600e720f94-hm-ports\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.540755 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/649e7de2-5622-4c10-81c4-7a600e720f94-config-data-merged\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.541824 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/649e7de2-5622-4c10-81c4-7a600e720f94-hm-ports\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.549314 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/649e7de2-5622-4c10-81c4-7a600e720f94-scripts\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.549787 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/649e7de2-5622-4c10-81c4-7a600e720f94-config-data\") pod \"octavia-rsyslog-56zpk\" (UID: \"649e7de2-5622-4c10-81c4-7a600e720f94\") " pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:51 crc kubenswrapper[4922]: I0930 00:06:51.678179 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.233728 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-56zpk"] Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.469483 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dr5g8"] Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.471831 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.475115 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.496756 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dr5g8"] Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.668986 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-httpd-config\") pod \"octavia-image-upload-59f8cff499-dr5g8\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.669283 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-amphora-image\") pod \"octavia-image-upload-59f8cff499-dr5g8\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.771398 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-httpd-config\") pod \"octavia-image-upload-59f8cff499-dr5g8\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.771482 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-amphora-image\") pod \"octavia-image-upload-59f8cff499-dr5g8\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.772034 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-amphora-image\") pod \"octavia-image-upload-59f8cff499-dr5g8\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.778689 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-httpd-config\") pod \"octavia-image-upload-59f8cff499-dr5g8\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.801296 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:06:52 crc kubenswrapper[4922]: I0930 00:06:52.836122 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-56zpk" event={"ID":"649e7de2-5622-4c10-81c4-7a600e720f94","Type":"ContainerStarted","Data":"e2b3a70e78a303f7b72a837a53ab647077e6f7b97d5eb9e1d7187e9a9a789a11"} Sep 30 00:06:53 crc kubenswrapper[4922]: I0930 00:06:53.269449 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dr5g8"] Sep 30 00:06:53 crc kubenswrapper[4922]: W0930 00:06:53.280379 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbf65a33_4792_4cb5_8442_dbdfe2c2dea1.slice/crio-66f3c0d03677665097180f42aa2b1bca1f901224cc5b3285657767200735562d WatchSource:0}: Error finding container 66f3c0d03677665097180f42aa2b1bca1f901224cc5b3285657767200735562d: Status 404 returned error can't find the container with id 66f3c0d03677665097180f42aa2b1bca1f901224cc5b3285657767200735562d Sep 30 00:06:53 crc kubenswrapper[4922]: I0930 00:06:53.847415 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" event={"ID":"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1","Type":"ContainerStarted","Data":"66f3c0d03677665097180f42aa2b1bca1f901224cc5b3285657767200735562d"} Sep 30 00:06:54 crc kubenswrapper[4922]: I0930 00:06:54.862498 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-56zpk" event={"ID":"649e7de2-5622-4c10-81c4-7a600e720f94","Type":"ContainerStarted","Data":"ff57222153164eecac2552dfb098c7a25ea0edfe94aa2bfbece4b8ffa8600afe"} Sep 30 00:06:56 crc kubenswrapper[4922]: I0930 00:06:56.422175 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:06:56 crc kubenswrapper[4922]: E0930 00:06:56.422917 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:06:56 crc kubenswrapper[4922]: I0930 00:06:56.927044 4922 generic.go:334] "Generic (PLEG): container finished" podID="649e7de2-5622-4c10-81c4-7a600e720f94" containerID="ff57222153164eecac2552dfb098c7a25ea0edfe94aa2bfbece4b8ffa8600afe" exitCode=0 Sep 30 00:06:56 crc kubenswrapper[4922]: I0930 00:06:56.927154 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-56zpk" event={"ID":"649e7de2-5622-4c10-81c4-7a600e720f94","Type":"ContainerDied","Data":"ff57222153164eecac2552dfb098c7a25ea0edfe94aa2bfbece4b8ffa8600afe"} Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.669991 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-t7ndh"] Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.673879 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.676283 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.681150 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-t7ndh"] Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.804118 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.804437 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-combined-ca-bundle\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.804636 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data-merged\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.804784 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-scripts\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.905732 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data-merged\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.905795 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-scripts\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.905852 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.905905 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-combined-ca-bundle\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.907411 4922 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data-merged\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.909987 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-scripts\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.913046 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-combined-ca-bundle\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.914318 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data\") pod \"octavia-db-sync-t7ndh\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:58 crc kubenswrapper[4922]: I0930 00:06:58.991691 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:06:59 crc kubenswrapper[4922]: I0930 00:06:59.449543 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-t7ndh"] Sep 30 00:06:59 crc kubenswrapper[4922]: I0930 00:06:59.959791 4922 generic.go:334] "Generic (PLEG): container finished" podID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" containerID="23ee53f48508d70204975c5d97e0e42e2f1fc40d0a63f9486229c5a779ceefcb" exitCode=0 Sep 30 00:06:59 crc kubenswrapper[4922]: I0930 00:06:59.960078 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-t7ndh" event={"ID":"06172216-e6e0-41bc-8622-1eeba0c9bc8b","Type":"ContainerDied","Data":"23ee53f48508d70204975c5d97e0e42e2f1fc40d0a63f9486229c5a779ceefcb"} Sep 30 00:06:59 crc kubenswrapper[4922]: I0930 00:06:59.960105 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-t7ndh" event={"ID":"06172216-e6e0-41bc-8622-1eeba0c9bc8b","Type":"ContainerStarted","Data":"79ba9ad506c20ac43f01d927cbf35811c79aa738605bc9ec292c6dbe0e230274"} Sep 30 00:06:59 crc kubenswrapper[4922]: I0930 00:06:59.966121 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-56zpk" event={"ID":"649e7de2-5622-4c10-81c4-7a600e720f94","Type":"ContainerStarted","Data":"213597a9194e3ecdf1bfe1f0887e582bfa726dc726cede9dc704bcfbf4313667"} Sep 30 00:06:59 crc kubenswrapper[4922]: I0930 00:06:59.966381 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:07:00 crc kubenswrapper[4922]: I0930 00:07:00.006408 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-56zpk" podStartSLOduration=2.539766741 podStartE2EDuration="9.006368642s" podCreationTimestamp="2025-09-30 00:06:51 +0000 UTC" firstStartedPulling="2025-09-30 00:06:52.243268562 +0000 UTC m=+6016.553557375" lastFinishedPulling="2025-09-30 00:06:58.709870463 +0000 UTC m=+6023.020159276" observedRunningTime="2025-09-30 00:06:59.994785066 +0000 UTC m=+6024.305073879" 
watchObservedRunningTime="2025-09-30 00:07:00.006368642 +0000 UTC m=+6024.316657455" Sep 30 00:07:00 crc kubenswrapper[4922]: I0930 00:07:00.980478 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-t7ndh" event={"ID":"06172216-e6e0-41bc-8622-1eeba0c9bc8b","Type":"ContainerStarted","Data":"2b4838cce3350cf25b2394679ecc60099bfb4f31fb0006d1eae043f6a7f1cb11"} Sep 30 00:07:01 crc kubenswrapper[4922]: I0930 00:07:01.006099 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-t7ndh" podStartSLOduration=3.006078539 podStartE2EDuration="3.006078539s" podCreationTimestamp="2025-09-30 00:06:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:07:00.996759379 +0000 UTC m=+6025.307048212" watchObservedRunningTime="2025-09-30 00:07:01.006078539 +0000 UTC m=+6025.316367352" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.665322 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-b4pfg"] Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.668226 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.670550 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.670612 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.670715 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.678737 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-b4pfg"] Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.692021 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-amphora-certs\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.692162 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-scripts\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.692190 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7e8895c6-5b08-47d8-9b31-941960476555-config-data-merged\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.692229 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-combined-ca-bundle\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " 
pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.692278 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-config-data\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.692300 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7e8895c6-5b08-47d8-9b31-941960476555-hm-ports\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.792952 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-scripts\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.792992 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7e8895c6-5b08-47d8-9b31-941960476555-config-data-merged\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.793025 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-combined-ca-bundle\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.793059 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-config-data\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.793097 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7e8895c6-5b08-47d8-9b31-941960476555-hm-ports\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.793151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-amphora-certs\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.794075 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7e8895c6-5b08-47d8-9b31-941960476555-config-data-merged\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: 
I0930 00:07:02.794901 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/7e8895c6-5b08-47d8-9b31-941960476555-hm-ports\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.798642 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-amphora-certs\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.807622 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-config-data\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.807805 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-combined-ca-bundle\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.807907 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e8895c6-5b08-47d8-9b31-941960476555-scripts\") pod \"octavia-healthmanager-b4pfg\" (UID: \"7e8895c6-5b08-47d8-9b31-941960476555\") " pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:02 crc kubenswrapper[4922]: I0930 00:07:02.989756 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:03 crc kubenswrapper[4922]: I0930 00:07:03.002662 4922 generic.go:334] "Generic (PLEG): container finished" podID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" containerID="2b4838cce3350cf25b2394679ecc60099bfb4f31fb0006d1eae043f6a7f1cb11" exitCode=0 Sep 30 00:07:03 crc kubenswrapper[4922]: I0930 00:07:03.002709 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-t7ndh" event={"ID":"06172216-e6e0-41bc-8622-1eeba0c9bc8b","Type":"ContainerDied","Data":"2b4838cce3350cf25b2394679ecc60099bfb4f31fb0006d1eae043f6a7f1cb11"} Sep 30 00:07:03 crc kubenswrapper[4922]: I0930 00:07:03.559912 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-b4pfg"] Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.018310 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4pfg" event={"ID":"7e8895c6-5b08-47d8-9b31-941960476555","Type":"ContainerStarted","Data":"0d7fcfe5cd2d6435105bdc58aa8af199dfcbf1e7251e01af89cc350b04284e84"} Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.468564 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.531926 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data-merged\") pod \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.531976 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-combined-ca-bundle\") pod \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.532000 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-scripts\") pod \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.532101 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data\") pod \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\" (UID: \"06172216-e6e0-41bc-8622-1eeba0c9bc8b\") " Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.538049 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-scripts" (OuterVolumeSpecName: "scripts") pod "06172216-e6e0-41bc-8622-1eeba0c9bc8b" (UID: "06172216-e6e0-41bc-8622-1eeba0c9bc8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.551723 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data" (OuterVolumeSpecName: "config-data") pod "06172216-e6e0-41bc-8622-1eeba0c9bc8b" (UID: "06172216-e6e0-41bc-8622-1eeba0c9bc8b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.562961 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "06172216-e6e0-41bc-8622-1eeba0c9bc8b" (UID: "06172216-e6e0-41bc-8622-1eeba0c9bc8b"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.567615 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06172216-e6e0-41bc-8622-1eeba0c9bc8b" (UID: "06172216-e6e0-41bc-8622-1eeba0c9bc8b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.634113 4922 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data-merged\") on node \"crc\" DevicePath \"\"" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.634146 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.634156 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:07:04 crc kubenswrapper[4922]: I0930 00:07:04.634163 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06172216-e6e0-41bc-8622-1eeba0c9bc8b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.034601 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4pfg" event={"ID":"7e8895c6-5b08-47d8-9b31-941960476555","Type":"ContainerStarted","Data":"3ea564270fe8aff688ce27983fd69657b11bcf20f8a4425ce25b34473e23deb6"} Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.039104 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-t7ndh" event={"ID":"06172216-e6e0-41bc-8622-1eeba0c9bc8b","Type":"ContainerDied","Data":"79ba9ad506c20ac43f01d927cbf35811c79aa738605bc9ec292c6dbe0e230274"} Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.039151 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79ba9ad506c20ac43f01d927cbf35811c79aa738605bc9ec292c6dbe0e230274" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.039189 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-t7ndh" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.408714 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-nznck"] Sep 30 00:07:05 crc kubenswrapper[4922]: E0930 00:07:05.409341 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" containerName="octavia-db-sync" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.409355 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" containerName="octavia-db-sync" Sep 30 00:07:05 crc kubenswrapper[4922]: E0930 00:07:05.409404 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" containerName="init" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.409410 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" containerName="init" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.409600 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" containerName="octavia-db-sync" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.410576 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.412806 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.415777 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.436731 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-nznck"] Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.448283 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-scripts\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.448325 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-combined-ca-bundle\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.448356 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/632a4144-06fe-4caf-8063-7314dfb2b64d-config-data-merged\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.449200 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-amphora-certs\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.449264 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-config-data\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.449322 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/632a4144-06fe-4caf-8063-7314dfb2b64d-hm-ports\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.551815 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-scripts\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.551871 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-combined-ca-bundle\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.551905 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/632a4144-06fe-4caf-8063-7314dfb2b64d-config-data-merged\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.552062 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-amphora-certs\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.552118 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-config-data\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.552162 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/632a4144-06fe-4caf-8063-7314dfb2b64d-hm-ports\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.554142 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/632a4144-06fe-4caf-8063-7314dfb2b64d-config-data-merged\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.554340 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/632a4144-06fe-4caf-8063-7314dfb2b64d-hm-ports\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.562790 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-config-data\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.563613 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-scripts\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.572126 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-combined-ca-bundle\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " 
pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.572814 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/632a4144-06fe-4caf-8063-7314dfb2b64d-amphora-certs\") pod \"octavia-housekeeping-nznck\" (UID: \"632a4144-06fe-4caf-8063-7314dfb2b64d\") " pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:05 crc kubenswrapper[4922]: I0930 00:07:05.747062 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:06 crc kubenswrapper[4922]: I0930 00:07:06.049602 4922 generic.go:334] "Generic (PLEG): container finished" podID="7e8895c6-5b08-47d8-9b31-941960476555" containerID="3ea564270fe8aff688ce27983fd69657b11bcf20f8a4425ce25b34473e23deb6" exitCode=0 Sep 30 00:07:06 crc kubenswrapper[4922]: I0930 00:07:06.049662 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4pfg" event={"ID":"7e8895c6-5b08-47d8-9b31-941960476555","Type":"ContainerDied","Data":"3ea564270fe8aff688ce27983fd69657b11bcf20f8a4425ce25b34473e23deb6"} Sep 30 00:07:06 crc kubenswrapper[4922]: I0930 00:07:06.265223 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-nznck"] Sep 30 00:07:06 crc kubenswrapper[4922]: W0930 00:07:06.279671 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod632a4144_06fe_4caf_8063_7314dfb2b64d.slice/crio-def7cc3975fb39058159d0721789427de1bfb0fd41f79bd79e803e483c942b2a WatchSource:0}: Error finding container def7cc3975fb39058159d0721789427de1bfb0fd41f79bd79e803e483c942b2a: Status 404 returned error can't find the container with id def7cc3975fb39058159d0721789427de1bfb0fd41f79bd79e803e483c942b2a Sep 30 00:07:06 crc kubenswrapper[4922]: I0930 00:07:06.714151 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-56zpk" Sep 30 00:07:07 crc kubenswrapper[4922]: I0930 00:07:07.060775 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-b4pfg" event={"ID":"7e8895c6-5b08-47d8-9b31-941960476555","Type":"ContainerStarted","Data":"7c29c5061672d41531ec2f706f1e807c8b227a782eca32cb43d8ca9f14ab9bc0"} Sep 30 00:07:07 crc kubenswrapper[4922]: I0930 00:07:07.061080 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:07 crc kubenswrapper[4922]: I0930 00:07:07.063191 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-nznck" event={"ID":"632a4144-06fe-4caf-8063-7314dfb2b64d","Type":"ContainerStarted","Data":"def7cc3975fb39058159d0721789427de1bfb0fd41f79bd79e803e483c942b2a"} Sep 30 00:07:07 crc kubenswrapper[4922]: I0930 00:07:07.088631 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-b4pfg" podStartSLOduration=5.088606153 podStartE2EDuration="5.088606153s" podCreationTimestamp="2025-09-30 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:07:07.08156419 +0000 UTC m=+6031.391853003" watchObservedRunningTime="2025-09-30 00:07:07.088606153 +0000 UTC m=+6031.398894966" Sep 30 00:07:07 crc kubenswrapper[4922]: I0930 00:07:07.428796 4922 scope.go:117] "RemoveContainer" 
containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:07:07 crc kubenswrapper[4922]: E0930 00:07:07.429304 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:07:09 crc kubenswrapper[4922]: I0930 00:07:09.080810 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-nznck" event={"ID":"632a4144-06fe-4caf-8063-7314dfb2b64d","Type":"ContainerStarted","Data":"66c729ccab34334f59b7655ec72fe68a627c3b685a641f3d4f38db8f19ccbd42"} Sep 30 00:07:10 crc kubenswrapper[4922]: I0930 00:07:10.094187 4922 generic.go:334] "Generic (PLEG): container finished" podID="632a4144-06fe-4caf-8063-7314dfb2b64d" containerID="66c729ccab34334f59b7655ec72fe68a627c3b685a641f3d4f38db8f19ccbd42" exitCode=0 Sep 30 00:07:10 crc kubenswrapper[4922]: I0930 00:07:10.094268 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-nznck" event={"ID":"632a4144-06fe-4caf-8063-7314dfb2b64d","Type":"ContainerDied","Data":"66c729ccab34334f59b7655ec72fe68a627c3b685a641f3d4f38db8f19ccbd42"} Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.107166 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-nznck" event={"ID":"632a4144-06fe-4caf-8063-7314dfb2b64d","Type":"ContainerStarted","Data":"f7038039add789edb08defdc4f4669917d486fe4eee9f37773239e441da71f61"} Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.107790 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.138048 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-nznck" podStartSLOduration=4.181877322 podStartE2EDuration="6.138021766s" podCreationTimestamp="2025-09-30 00:07:05 +0000 UTC" firstStartedPulling="2025-09-30 00:07:06.28353385 +0000 UTC m=+6030.593822663" lastFinishedPulling="2025-09-30 00:07:08.239678294 +0000 UTC m=+6032.549967107" observedRunningTime="2025-09-30 00:07:11.130800257 +0000 UTC m=+6035.441089080" watchObservedRunningTime="2025-09-30 00:07:11.138021766 +0000 UTC m=+6035.448310599" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.546516 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-fvl7m"] Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.549316 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.558295 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.558767 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.580410 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-fvl7m"] Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.682498 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-scripts\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.682570 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-config-data-merged\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.682705 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-combined-ca-bundle\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.682784 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-amphora-certs\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.682836 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-hm-ports\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.682869 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-config-data\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.784435 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-combined-ca-bundle\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.784533 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-amphora-certs\") pod \"octavia-worker-fvl7m\" (UID: 
\"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.784594 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-hm-ports\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.784618 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-config-data\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.784716 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-scripts\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.784741 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-config-data-merged\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.785378 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-config-data-merged\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.785986 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-hm-ports\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.791808 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-config-data\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.792711 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-scripts\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.797488 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-amphora-certs\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.802448 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f0e2d4bb-07a0-4aae-b471-c8fdeb214d88-combined-ca-bundle\") pod \"octavia-worker-fvl7m\" (UID: \"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88\") " pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:11 crc kubenswrapper[4922]: I0930 00:07:11.875506 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:13 crc kubenswrapper[4922]: W0930 00:07:12.446747 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0e2d4bb_07a0_4aae_b471_c8fdeb214d88.slice/crio-5738849dbc5365d250389283b75d2b0203cddbb830f1d2ece837a2d6ade4440c WatchSource:0}: Error finding container 5738849dbc5365d250389283b75d2b0203cddbb830f1d2ece837a2d6ade4440c: Status 404 returned error can't find the container with id 5738849dbc5365d250389283b75d2b0203cddbb830f1d2ece837a2d6ade4440c Sep 30 00:07:13 crc kubenswrapper[4922]: I0930 00:07:12.456643 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-fvl7m"] Sep 30 00:07:13 crc kubenswrapper[4922]: I0930 00:07:13.155646 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-fvl7m" event={"ID":"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88","Type":"ContainerStarted","Data":"5738849dbc5365d250389283b75d2b0203cddbb830f1d2ece837a2d6ade4440c"} Sep 30 00:07:15 crc kubenswrapper[4922]: I0930 00:07:15.176165 4922 generic.go:334] "Generic (PLEG): container finished" podID="f0e2d4bb-07a0-4aae-b471-c8fdeb214d88" containerID="77aeaa937b70cffb33aa0527b6a3674d2613280460370c89f06268b309114967" exitCode=0 Sep 30 00:07:15 crc kubenswrapper[4922]: I0930 00:07:15.176214 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-fvl7m" event={"ID":"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88","Type":"ContainerDied","Data":"77aeaa937b70cffb33aa0527b6a3674d2613280460370c89f06268b309114967"} Sep 30 00:07:16 crc kubenswrapper[4922]: I0930 00:07:16.188964 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-fvl7m" event={"ID":"f0e2d4bb-07a0-4aae-b471-c8fdeb214d88","Type":"ContainerStarted","Data":"44c261c13057d09203b5b9f17a6c15ca07bf6e71fda46d5b487f3af220e8af92"} Sep 30 00:07:16 crc kubenswrapper[4922]: I0930 00:07:16.189551 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:16 crc kubenswrapper[4922]: I0930 00:07:16.208448 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-fvl7m" podStartSLOduration=3.799405265 podStartE2EDuration="5.208425189s" podCreationTimestamp="2025-09-30 00:07:11 +0000 UTC" firstStartedPulling="2025-09-30 00:07:12.450629662 +0000 UTC m=+6036.760918475" lastFinishedPulling="2025-09-30 00:07:13.859649586 +0000 UTC m=+6038.169938399" observedRunningTime="2025-09-30 00:07:16.207788433 +0000 UTC m=+6040.518077256" watchObservedRunningTime="2025-09-30 00:07:16.208425189 +0000 UTC m=+6040.518714022" Sep 30 00:07:18 crc kubenswrapper[4922]: I0930 00:07:18.062848 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-b4pfg" Sep 30 00:07:19 crc kubenswrapper[4922]: I0930 00:07:19.045355 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-7z76j"] Sep 30 00:07:19 crc kubenswrapper[4922]: I0930 00:07:19.052674 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-7z76j"] Sep 30 00:07:19 crc 
kubenswrapper[4922]: I0930 00:07:19.421905 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:07:19 crc kubenswrapper[4922]: E0930 00:07:19.422554 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:07:20 crc kubenswrapper[4922]: I0930 00:07:20.435781 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a" path="/var/lib/kubelet/pods/9eccfaa2-a65a-4d40-8a7a-73b98f8ab19a/volumes" Sep 30 00:07:20 crc kubenswrapper[4922]: I0930 00:07:20.779155 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-nznck" Sep 30 00:07:21 crc kubenswrapper[4922]: I0930 00:07:21.258377 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" event={"ID":"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1","Type":"ContainerStarted","Data":"a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec"} Sep 30 00:07:22 crc kubenswrapper[4922]: I0930 00:07:22.270369 4922 generic.go:334] "Generic (PLEG): container finished" podID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerID="a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec" exitCode=0 Sep 30 00:07:22 crc kubenswrapper[4922]: I0930 00:07:22.270575 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" event={"ID":"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1","Type":"ContainerDied","Data":"a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec"} Sep 30 00:07:24 crc kubenswrapper[4922]: I0930 00:07:24.306079 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" event={"ID":"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1","Type":"ContainerStarted","Data":"2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313"} Sep 30 00:07:24 crc kubenswrapper[4922]: I0930 00:07:24.320869 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" podStartSLOduration=1.654404749 podStartE2EDuration="32.320846387s" podCreationTimestamp="2025-09-30 00:06:52 +0000 UTC" firstStartedPulling="2025-09-30 00:06:53.283021926 +0000 UTC m=+6017.593310739" lastFinishedPulling="2025-09-30 00:07:23.949463554 +0000 UTC m=+6048.259752377" observedRunningTime="2025-09-30 00:07:24.32014069 +0000 UTC m=+6048.630429513" watchObservedRunningTime="2025-09-30 00:07:24.320846387 +0000 UTC m=+6048.631135200" Sep 30 00:07:26 crc kubenswrapper[4922]: I0930 00:07:26.914999 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-fvl7m" Sep 30 00:07:29 crc kubenswrapper[4922]: I0930 00:07:29.059773 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-cfae-account-create-djnqq"] Sep 30 00:07:29 crc kubenswrapper[4922]: I0930 00:07:29.071998 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-cfae-account-create-djnqq"] Sep 30 00:07:30 crc kubenswrapper[4922]: I0930 00:07:30.434142 4922 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="a7bca374-9fe8-4a69-843c-2a25dfa667e0" path="/var/lib/kubelet/pods/a7bca374-9fe8-4a69-843c-2a25dfa667e0/volumes" Sep 30 00:07:32 crc kubenswrapper[4922]: I0930 00:07:32.422564 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:07:32 crc kubenswrapper[4922]: E0930 00:07:32.422867 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:07:36 crc kubenswrapper[4922]: I0930 00:07:36.047666 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-wztdn"] Sep 30 00:07:36 crc kubenswrapper[4922]: I0930 00:07:36.059851 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-wztdn"] Sep 30 00:07:36 crc kubenswrapper[4922]: I0930 00:07:36.441605 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dfb3142-3cf3-4586-8e2e-5e5ff9dca842" path="/var/lib/kubelet/pods/2dfb3142-3cf3-4586-8e2e-5e5ff9dca842/volumes" Sep 30 00:07:43 crc kubenswrapper[4922]: I0930 00:07:43.422634 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:07:43 crc kubenswrapper[4922]: E0930 00:07:43.425640 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:07:51 crc kubenswrapper[4922]: I0930 00:07:51.861874 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dr5g8"] Sep 30 00:07:51 crc kubenswrapper[4922]: I0930 00:07:51.862884 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" podUID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerName="octavia-amphora-httpd" containerID="cri-o://2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313" gracePeriod=30 Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.476869 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.601006 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-amphora-image\") pod \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.601236 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-httpd-config\") pod \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\" (UID: \"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1\") " Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.635835 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" (UID: "fbf65a33-4792-4cb5-8442-dbdfe2c2dea1"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.637919 4922 generic.go:334] "Generic (PLEG): container finished" podID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerID="2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313" exitCode=0 Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.637969 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" event={"ID":"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1","Type":"ContainerDied","Data":"2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313"} Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.638005 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" event={"ID":"fbf65a33-4792-4cb5-8442-dbdfe2c2dea1","Type":"ContainerDied","Data":"66f3c0d03677665097180f42aa2b1bca1f901224cc5b3285657767200735562d"} Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.638025 4922 scope.go:117] "RemoveContainer" containerID="2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.638246 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-dr5g8" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.668283 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" (UID: "fbf65a33-4792-4cb5-8442-dbdfe2c2dea1"). InnerVolumeSpecName "amphora-image". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.704684 4922 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-amphora-image\") on node \"crc\" DevicePath \"\"" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.704745 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.735900 4922 scope.go:117] "RemoveContainer" containerID="a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.759979 4922 scope.go:117] "RemoveContainer" containerID="2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313" Sep 30 00:07:52 crc kubenswrapper[4922]: E0930 00:07:52.761703 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313\": container with ID starting with 2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313 not found: ID does not exist" containerID="2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.761742 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313"} err="failed to get container status \"2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313\": rpc error: code = NotFound desc = could not find container \"2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313\": container with ID starting with 2f9c01ec40ff393e9a5ec5b7e7d1bec6a65383d26772520db82fac1100451313 not found: ID does not exist" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.761767 4922 scope.go:117] "RemoveContainer" containerID="a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec" Sep 30 00:07:52 crc kubenswrapper[4922]: E0930 00:07:52.762255 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec\": container with ID starting with a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec not found: ID does not exist" containerID="a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.762291 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec"} err="failed to get container status \"a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec\": rpc error: code = NotFound desc = could not find container \"a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec\": container with ID starting with a15b1b2a7846df6969aed73db9f81a789bc634297cde17f83a0e0e7060f074ec not found: ID does not exist" Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.983038 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-dr5g8"] Sep 30 00:07:52 crc kubenswrapper[4922]: I0930 00:07:52.993193 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/octavia-image-upload-59f8cff499-dr5g8"] Sep 30 00:07:53 crc kubenswrapper[4922]: I0930 00:07:53.859262 4922 scope.go:117] "RemoveContainer" containerID="bde6a7e3ad9037e999cfc576642da753745d42e8f3aaa6a47ac774d5d03916fb" Sep 30 00:07:53 crc kubenswrapper[4922]: I0930 00:07:53.904825 4922 scope.go:117] "RemoveContainer" containerID="2e9aecf0361f59c55c7dcb26b537d5742f9f50a4d57c8b7b7cfb1d50a9081b84" Sep 30 00:07:53 crc kubenswrapper[4922]: I0930 00:07:53.945293 4922 scope.go:117] "RemoveContainer" containerID="b9e7a55da4830d4a880476a53ba65d2342983ac45e3bbd020f2bfef78fcd1ba1" Sep 30 00:07:54 crc kubenswrapper[4922]: I0930 00:07:54.442351 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" path="/var/lib/kubelet/pods/fbf65a33-4792-4cb5-8442-dbdfe2c2dea1/volumes" Sep 30 00:07:56 crc kubenswrapper[4922]: I0930 00:07:56.454801 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:07:56 crc kubenswrapper[4922]: E0930 00:07:56.456561 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.782088 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-59f8cff499-9qjl9"] Sep 30 00:07:57 crc kubenswrapper[4922]: E0930 00:07:57.782914 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerName="init" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.782931 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerName="init" Sep 30 00:07:57 crc kubenswrapper[4922]: E0930 00:07:57.782982 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerName="octavia-amphora-httpd" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.782993 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerName="octavia-amphora-httpd" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.783221 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf65a33-4792-4cb5-8442-dbdfe2c2dea1" containerName="octavia-amphora-httpd" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.784610 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.789363 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.795056 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-9qjl9"] Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.936025 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb3496a2-59f9-4ed5-bef5-7e1e13058f7d-httpd-config\") pod \"octavia-image-upload-59f8cff499-9qjl9\" (UID: \"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d\") " pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:57 crc kubenswrapper[4922]: I0930 00:07:57.936651 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cb3496a2-59f9-4ed5-bef5-7e1e13058f7d-amphora-image\") pod \"octavia-image-upload-59f8cff499-9qjl9\" (UID: \"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d\") " pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:58 crc kubenswrapper[4922]: I0930 00:07:58.038094 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb3496a2-59f9-4ed5-bef5-7e1e13058f7d-httpd-config\") pod \"octavia-image-upload-59f8cff499-9qjl9\" (UID: \"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d\") " pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:58 crc kubenswrapper[4922]: I0930 00:07:58.038688 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cb3496a2-59f9-4ed5-bef5-7e1e13058f7d-amphora-image\") pod \"octavia-image-upload-59f8cff499-9qjl9\" (UID: \"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d\") " pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:58 crc kubenswrapper[4922]: I0930 00:07:58.039094 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/cb3496a2-59f9-4ed5-bef5-7e1e13058f7d-amphora-image\") pod \"octavia-image-upload-59f8cff499-9qjl9\" (UID: \"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d\") " pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:58 crc kubenswrapper[4922]: I0930 00:07:58.046479 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cb3496a2-59f9-4ed5-bef5-7e1e13058f7d-httpd-config\") pod \"octavia-image-upload-59f8cff499-9qjl9\" (UID: \"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d\") " pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:58 crc kubenswrapper[4922]: I0930 00:07:58.104664 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-image-upload-59f8cff499-9qjl9" Sep 30 00:07:58 crc kubenswrapper[4922]: I0930 00:07:58.559348 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-59f8cff499-9qjl9"] Sep 30 00:07:58 crc kubenswrapper[4922]: I0930 00:07:58.708928 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-9qjl9" event={"ID":"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d","Type":"ContainerStarted","Data":"42271447bd6d8e7f42bda02d4ad6f7540b5015201651f80b088d2463b557f120"} Sep 30 00:07:59 crc kubenswrapper[4922]: I0930 00:07:59.720990 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-9qjl9" event={"ID":"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d","Type":"ContainerStarted","Data":"8541d1781873930a3508efd0fb840725649b0437f9f79ce315a2e19f314acb60"} Sep 30 00:08:00 crc kubenswrapper[4922]: I0930 00:08:00.730202 4922 generic.go:334] "Generic (PLEG): container finished" podID="cb3496a2-59f9-4ed5-bef5-7e1e13058f7d" containerID="8541d1781873930a3508efd0fb840725649b0437f9f79ce315a2e19f314acb60" exitCode=0 Sep 30 00:08:00 crc kubenswrapper[4922]: I0930 00:08:00.730307 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-9qjl9" event={"ID":"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d","Type":"ContainerDied","Data":"8541d1781873930a3508efd0fb840725649b0437f9f79ce315a2e19f314acb60"} Sep 30 00:08:02 crc kubenswrapper[4922]: I0930 00:08:02.750280 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-59f8cff499-9qjl9" event={"ID":"cb3496a2-59f9-4ed5-bef5-7e1e13058f7d","Type":"ContainerStarted","Data":"d6d88177444f8542007e9dfd710cef4d0b5156f957ea1e1d5f6d0c779b3e45e5"} Sep 30 00:08:02 crc kubenswrapper[4922]: I0930 00:08:02.772540 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-59f8cff499-9qjl9" podStartSLOduration=2.028565486 podStartE2EDuration="5.772517382s" podCreationTimestamp="2025-09-30 00:07:57 +0000 UTC" firstStartedPulling="2025-09-30 00:07:58.572821452 +0000 UTC m=+6082.883110265" lastFinishedPulling="2025-09-30 00:08:02.316773348 +0000 UTC m=+6086.627062161" observedRunningTime="2025-09-30 00:08:02.768864702 +0000 UTC m=+6087.079153515" watchObservedRunningTime="2025-09-30 00:08:02.772517382 +0000 UTC m=+6087.082806195" Sep 30 00:08:07 crc kubenswrapper[4922]: I0930 00:08:07.052982 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-c7vkh"] Sep 30 00:08:07 crc kubenswrapper[4922]: I0930 00:08:07.067500 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-c7vkh"] Sep 30 00:08:08 crc kubenswrapper[4922]: I0930 00:08:08.434647 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70010ad7-7568-440e-9521-19e013b68753" path="/var/lib/kubelet/pods/70010ad7-7568-440e-9521-19e013b68753/volumes" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.272843 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jssn4"] Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.275200 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.301433 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jssn4"] Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.371035 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5h22\" (UniqueName: \"kubernetes.io/projected/2e47b9c0-fa25-487b-833e-a56ef0c586eb-kube-api-access-s5h22\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.371177 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-catalog-content\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.371222 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-utilities\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.422488 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:08:09 crc kubenswrapper[4922]: E0930 00:08:09.422731 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.473619 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-catalog-content\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.473701 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-utilities\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.473835 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5h22\" (UniqueName: \"kubernetes.io/projected/2e47b9c0-fa25-487b-833e-a56ef0c586eb-kube-api-access-s5h22\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.474176 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-catalog-content\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.474234 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-utilities\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.494387 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5h22\" (UniqueName: \"kubernetes.io/projected/2e47b9c0-fa25-487b-833e-a56ef0c586eb-kube-api-access-s5h22\") pod \"redhat-marketplace-jssn4\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:09 crc kubenswrapper[4922]: I0930 00:08:09.599613 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:10 crc kubenswrapper[4922]: I0930 00:08:10.069511 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jssn4"] Sep 30 00:08:10 crc kubenswrapper[4922]: I0930 00:08:10.824286 4922 generic.go:334] "Generic (PLEG): container finished" podID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerID="66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636" exitCode=0 Sep 30 00:08:10 crc kubenswrapper[4922]: I0930 00:08:10.824586 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jssn4" event={"ID":"2e47b9c0-fa25-487b-833e-a56ef0c586eb","Type":"ContainerDied","Data":"66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636"} Sep 30 00:08:10 crc kubenswrapper[4922]: I0930 00:08:10.824610 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jssn4" event={"ID":"2e47b9c0-fa25-487b-833e-a56ef0c586eb","Type":"ContainerStarted","Data":"0eaadcd161a5c5764e5e83387909367953ef20880ea7de084f6aab69543e23f2"} Sep 30 00:08:12 crc kubenswrapper[4922]: I0930 00:08:12.844668 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jssn4" event={"ID":"2e47b9c0-fa25-487b-833e-a56ef0c586eb","Type":"ContainerStarted","Data":"096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416"} Sep 30 00:08:13 crc kubenswrapper[4922]: I0930 00:08:13.856163 4922 generic.go:334] "Generic (PLEG): container finished" podID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerID="096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416" exitCode=0 Sep 30 00:08:13 crc kubenswrapper[4922]: I0930 00:08:13.856220 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jssn4" event={"ID":"2e47b9c0-fa25-487b-833e-a56ef0c586eb","Type":"ContainerDied","Data":"096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416"} Sep 30 00:08:14 crc kubenswrapper[4922]: I0930 00:08:14.875036 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jssn4" event={"ID":"2e47b9c0-fa25-487b-833e-a56ef0c586eb","Type":"ContainerStarted","Data":"265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a"} Sep 30 00:08:14 crc kubenswrapper[4922]: I0930 00:08:14.892329 4922 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jssn4" podStartSLOduration=2.389636793 podStartE2EDuration="5.892312095s" podCreationTimestamp="2025-09-30 00:08:09 +0000 UTC" firstStartedPulling="2025-09-30 00:08:10.8269341 +0000 UTC m=+6095.137222913" lastFinishedPulling="2025-09-30 00:08:14.329609362 +0000 UTC m=+6098.639898215" observedRunningTime="2025-09-30 00:08:14.889491796 +0000 UTC m=+6099.199780599" watchObservedRunningTime="2025-09-30 00:08:14.892312095 +0000 UTC m=+6099.202600908" Sep 30 00:08:17 crc kubenswrapper[4922]: I0930 00:08:17.036265 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-e944-account-create-t878l"] Sep 30 00:08:17 crc kubenswrapper[4922]: I0930 00:08:17.044668 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-e944-account-create-t878l"] Sep 30 00:08:18 crc kubenswrapper[4922]: I0930 00:08:18.437026 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b352964c-1637-400a-b4d5-5c4ff1bdb4a4" path="/var/lib/kubelet/pods/b352964c-1637-400a-b4d5-5c4ff1bdb4a4/volumes" Sep 30 00:08:19 crc kubenswrapper[4922]: I0930 00:08:19.600069 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:19 crc kubenswrapper[4922]: I0930 00:08:19.601542 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:19 crc kubenswrapper[4922]: I0930 00:08:19.693008 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:19 crc kubenswrapper[4922]: I0930 00:08:19.973839 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:20 crc kubenswrapper[4922]: I0930 00:08:20.017007 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jssn4"] Sep 30 00:08:21 crc kubenswrapper[4922]: I0930 00:08:21.947288 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jssn4" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="registry-server" containerID="cri-o://265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a" gracePeriod=2 Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.423491 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:08:22 crc kubenswrapper[4922]: E0930 00:08:22.424067 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.587855 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.680253 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5h22\" (UniqueName: \"kubernetes.io/projected/2e47b9c0-fa25-487b-833e-a56ef0c586eb-kube-api-access-s5h22\") pod \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.680436 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-utilities\") pod \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.680605 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-catalog-content\") pod \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\" (UID: \"2e47b9c0-fa25-487b-833e-a56ef0c586eb\") " Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.682280 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-utilities" (OuterVolumeSpecName: "utilities") pod "2e47b9c0-fa25-487b-833e-a56ef0c586eb" (UID: "2e47b9c0-fa25-487b-833e-a56ef0c586eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.691496 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e47b9c0-fa25-487b-833e-a56ef0c586eb-kube-api-access-s5h22" (OuterVolumeSpecName: "kube-api-access-s5h22") pod "2e47b9c0-fa25-487b-833e-a56ef0c586eb" (UID: "2e47b9c0-fa25-487b-833e-a56ef0c586eb"). InnerVolumeSpecName "kube-api-access-s5h22". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.700568 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e47b9c0-fa25-487b-833e-a56ef0c586eb" (UID: "2e47b9c0-fa25-487b-833e-a56ef0c586eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.782454 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.782580 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e47b9c0-fa25-487b-833e-a56ef0c586eb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.782638 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5h22\" (UniqueName: \"kubernetes.io/projected/2e47b9c0-fa25-487b-833e-a56ef0c586eb-kube-api-access-s5h22\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.985272 4922 generic.go:334] "Generic (PLEG): container finished" podID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerID="265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a" exitCode=0 Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.985898 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jssn4" event={"ID":"2e47b9c0-fa25-487b-833e-a56ef0c586eb","Type":"ContainerDied","Data":"265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a"} Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.985976 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jssn4" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.986271 4922 scope.go:117] "RemoveContainer" containerID="265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a" Sep 30 00:08:22 crc kubenswrapper[4922]: I0930 00:08:22.987250 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jssn4" event={"ID":"2e47b9c0-fa25-487b-833e-a56ef0c586eb","Type":"ContainerDied","Data":"0eaadcd161a5c5764e5e83387909367953ef20880ea7de084f6aab69543e23f2"} Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.035868 4922 scope.go:117] "RemoveContainer" containerID="096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416" Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.047072 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jssn4"] Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.112244 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jssn4"] Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.120327 4922 scope.go:117] "RemoveContainer" containerID="66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636" Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.143470 4922 scope.go:117] "RemoveContainer" containerID="265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a" Sep 30 00:08:23 crc kubenswrapper[4922]: E0930 00:08:23.143874 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a\": container with ID starting with 265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a not found: ID does not exist" containerID="265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a" Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.143918 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a"} err="failed to get container status \"265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a\": rpc error: code = NotFound desc = could not find container \"265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a\": container with ID starting with 265ddca6bc25757f5b0f95320c72843a53be6006f86304b9008ac60a05841d0a not found: ID does not exist" Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.143953 4922 scope.go:117] "RemoveContainer" containerID="096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416" Sep 30 00:08:23 crc kubenswrapper[4922]: E0930 00:08:23.144189 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416\": container with ID starting with 096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416 not found: ID does not exist" containerID="096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416" Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.144226 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416"} err="failed to get container status \"096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416\": rpc error: code = NotFound desc = could not find container \"096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416\": container with ID starting with 096ee8be1e8b9f3d6a6e651f8186593fb0e05dd37fe03f847beadc4c9068a416 not found: ID does not exist" Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.144249 4922 scope.go:117] "RemoveContainer" containerID="66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636" Sep 30 00:08:23 crc kubenswrapper[4922]: E0930 00:08:23.144487 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636\": container with ID starting with 66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636 not found: ID does not exist" containerID="66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636" Sep 30 00:08:23 crc kubenswrapper[4922]: I0930 00:08:23.144518 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636"} err="failed to get container status \"66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636\": rpc error: code = NotFound desc = could not find container \"66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636\": container with ID starting with 66de21773e8f904f7f4f8e39fb8286edebbb6e9eaa3904bd663185e1b3bdb636 not found: ID does not exist" Sep 30 00:08:24 crc kubenswrapper[4922]: I0930 00:08:24.442738 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" path="/var/lib/kubelet/pods/2e47b9c0-fa25-487b-833e-a56ef0c586eb/volumes" Sep 30 00:08:26 crc kubenswrapper[4922]: I0930 00:08:26.047142 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-kntp5"] Sep 30 00:08:26 crc kubenswrapper[4922]: I0930 00:08:26.059981 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/neutron-db-sync-kntp5"] Sep 30 00:08:26 crc kubenswrapper[4922]: I0930 00:08:26.440627 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dc028b1-0993-4f86-a56f-d2f2043fc999" path="/var/lib/kubelet/pods/9dc028b1-0993-4f86-a56f-d2f2043fc999/volumes" Sep 30 00:08:36 crc kubenswrapper[4922]: I0930 00:08:36.430496 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:08:36 crc kubenswrapper[4922]: E0930 00:08:36.437897 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.677659 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-644d7d6565-424tm"] Sep 30 00:08:37 crc kubenswrapper[4922]: E0930 00:08:37.678339 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="extract-utilities" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.678353 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="extract-utilities" Sep 30 00:08:37 crc kubenswrapper[4922]: E0930 00:08:37.678366 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="extract-content" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.678371 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="extract-content" Sep 30 00:08:37 crc kubenswrapper[4922]: E0930 00:08:37.678406 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="registry-server" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.678414 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="registry-server" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.678629 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e47b9c0-fa25-487b-833e-a56ef0c586eb" containerName="registry-server" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.679629 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.682309 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.683920 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-644kg" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.683965 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.688076 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.714258 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-644d7d6565-424tm"] Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.751378 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.751643 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-log" containerID="cri-o://d0dbe8ea80b2c96687b2aaedecd5b006bb2c4ae1ad09b94440abe47c5645ac3a" gracePeriod=30 Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.751784 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-httpd" containerID="cri-o://084e370aa0a43bd4d2727702dbb0a175fab65b9cbae2db19fc2567207a88880c" gracePeriod=30 Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.801419 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b8db4dbc5-pznzw"] Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.807422 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.819192 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b8db4dbc5-pznzw"] Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.831336 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a67a0a27-0c64-462e-b3f5-388a0ec4478e-horizon-secret-key\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.831476 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a67a0a27-0c64-462e-b3f5-388a0ec4478e-logs\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.831563 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcnbv\" (UniqueName: \"kubernetes.io/projected/a67a0a27-0c64-462e-b3f5-388a0ec4478e-kube-api-access-lcnbv\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.831755 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-config-data\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.831787 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-scripts\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.833030 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.833253 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-log" containerID="cri-o://ce215a73f76762f9222329879baa9c8056c4ab48717537ec703e51021124c4c1" gracePeriod=30 Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.833420 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-httpd" containerID="cri-o://a5c4fd02a838b947bdc1c121d04ea83d5bd986c680b033ec708d5f06fe6cac42" gracePeriod=30 Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933330 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-config-data\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933419 4922 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-scripts\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933517 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a67a0a27-0c64-462e-b3f5-388a0ec4478e-horizon-secret-key\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933558 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w8xs\" (UniqueName: \"kubernetes.io/projected/4672fca9-dbff-4881-98be-58df50e49fcb-kube-api-access-7w8xs\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933671 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-scripts\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933697 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4672fca9-dbff-4881-98be-58df50e49fcb-logs\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933764 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4672fca9-dbff-4881-98be-58df50e49fcb-horizon-secret-key\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.933788 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a67a0a27-0c64-462e-b3f5-388a0ec4478e-logs\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.934506 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a67a0a27-0c64-462e-b3f5-388a0ec4478e-logs\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.934571 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcnbv\" (UniqueName: \"kubernetes.io/projected/a67a0a27-0c64-462e-b3f5-388a0ec4478e-kube-api-access-lcnbv\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.934591 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-scripts\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.934701 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-config-data\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.935017 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-config-data\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.941857 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a67a0a27-0c64-462e-b3f5-388a0ec4478e-horizon-secret-key\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:37 crc kubenswrapper[4922]: I0930 00:08:37.955826 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcnbv\" (UniqueName: \"kubernetes.io/projected/a67a0a27-0c64-462e-b3f5-388a0ec4478e-kube-api-access-lcnbv\") pod \"horizon-644d7d6565-424tm\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.003908 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.042122 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-config-data\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.042512 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w8xs\" (UniqueName: \"kubernetes.io/projected/4672fca9-dbff-4881-98be-58df50e49fcb-kube-api-access-7w8xs\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.042582 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-scripts\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.042608 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4672fca9-dbff-4881-98be-58df50e49fcb-logs\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.042650 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4672fca9-dbff-4881-98be-58df50e49fcb-horizon-secret-key\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.043102 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4672fca9-dbff-4881-98be-58df50e49fcb-logs\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.043412 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-scripts\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.043581 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-config-data\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.045911 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4672fca9-dbff-4881-98be-58df50e49fcb-horizon-secret-key\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.057901 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7w8xs\" (UniqueName: \"kubernetes.io/projected/4672fca9-dbff-4881-98be-58df50e49fcb-kube-api-access-7w8xs\") pod \"horizon-7b8db4dbc5-pznzw\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.126277 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.161833 4922 generic.go:334] "Generic (PLEG): container finished" podID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerID="d0dbe8ea80b2c96687b2aaedecd5b006bb2c4ae1ad09b94440abe47c5645ac3a" exitCode=143 Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.161919 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2639f2fb-9a5f-4831-9302-3e2d6cd82d84","Type":"ContainerDied","Data":"d0dbe8ea80b2c96687b2aaedecd5b006bb2c4ae1ad09b94440abe47c5645ac3a"} Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.163819 4922 generic.go:334] "Generic (PLEG): container finished" podID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerID="ce215a73f76762f9222329879baa9c8056c4ab48717537ec703e51021124c4c1" exitCode=143 Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.163935 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"96af43a5-5c5a-4d87-9860-dc79a4e0a54a","Type":"ContainerDied","Data":"ce215a73f76762f9222329879baa9c8056c4ab48717537ec703e51021124c4c1"} Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.360635 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-644d7d6565-424tm"] Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.385547 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-749d8576cc-xm7k7"] Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.387074 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.397791 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-749d8576cc-xm7k7"] Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.522141 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-644d7d6565-424tm"] Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.554344 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxxfr\" (UniqueName: \"kubernetes.io/projected/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-kube-api-access-bxxfr\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.554465 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-logs\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.554550 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-scripts\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.554595 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-horizon-secret-key\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.554694 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-config-data\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.656690 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxxfr\" (UniqueName: \"kubernetes.io/projected/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-kube-api-access-bxxfr\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.657112 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-logs\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.657166 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-scripts\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.657564 
4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-logs\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.658157 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-scripts\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.658246 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-horizon-secret-key\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.659193 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-config-data\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.660332 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-config-data\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.664543 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-horizon-secret-key\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.672612 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b8db4dbc5-pznzw"] Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.674934 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxxfr\" (UniqueName: \"kubernetes.io/projected/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-kube-api-access-bxxfr\") pod \"horizon-749d8576cc-xm7k7\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:38 crc kubenswrapper[4922]: W0930 00:08:38.676355 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4672fca9_dbff_4881_98be_58df50e49fcb.slice/crio-0e4cb0dac79b548987675a2c23864e2e527acb44e2133d4df95715ba57ac7f9a WatchSource:0}: Error finding container 0e4cb0dac79b548987675a2c23864e2e527acb44e2133d4df95715ba57ac7f9a: Status 404 returned error can't find the container with id 0e4cb0dac79b548987675a2c23864e2e527acb44e2133d4df95715ba57ac7f9a Sep 30 00:08:38 crc kubenswrapper[4922]: I0930 00:08:38.720611 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:39 crc kubenswrapper[4922]: W0930 00:08:39.170416 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b6bfd79_b7f3_4858_a095_4cbf2e6db3f5.slice/crio-536cbddf0e6dfaebd7fe9f3810dad8a244c20239b0d74ae32f369ff25235c321 WatchSource:0}: Error finding container 536cbddf0e6dfaebd7fe9f3810dad8a244c20239b0d74ae32f369ff25235c321: Status 404 returned error can't find the container with id 536cbddf0e6dfaebd7fe9f3810dad8a244c20239b0d74ae32f369ff25235c321 Sep 30 00:08:39 crc kubenswrapper[4922]: I0930 00:08:39.178361 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8db4dbc5-pznzw" event={"ID":"4672fca9-dbff-4881-98be-58df50e49fcb","Type":"ContainerStarted","Data":"0e4cb0dac79b548987675a2c23864e2e527acb44e2133d4df95715ba57ac7f9a"} Sep 30 00:08:39 crc kubenswrapper[4922]: I0930 00:08:39.179361 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-749d8576cc-xm7k7"] Sep 30 00:08:39 crc kubenswrapper[4922]: I0930 00:08:39.180777 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d7d6565-424tm" event={"ID":"a67a0a27-0c64-462e-b3f5-388a0ec4478e","Type":"ContainerStarted","Data":"dacbe4d8c5674bb38915661f295fe8e38bb81aa204a48a344df9d3d71c243579"} Sep 30 00:08:40 crc kubenswrapper[4922]: I0930 00:08:40.201227 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749d8576cc-xm7k7" event={"ID":"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5","Type":"ContainerStarted","Data":"536cbddf0e6dfaebd7fe9f3810dad8a244c20239b0d74ae32f369ff25235c321"} Sep 30 00:08:41 crc kubenswrapper[4922]: I0930 00:08:41.214674 4922 generic.go:334] "Generic (PLEG): container finished" podID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerID="084e370aa0a43bd4d2727702dbb0a175fab65b9cbae2db19fc2567207a88880c" exitCode=0 Sep 30 00:08:41 crc kubenswrapper[4922]: I0930 00:08:41.214735 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2639f2fb-9a5f-4831-9302-3e2d6cd82d84","Type":"ContainerDied","Data":"084e370aa0a43bd4d2727702dbb0a175fab65b9cbae2db19fc2567207a88880c"} Sep 30 00:08:41 crc kubenswrapper[4922]: I0930 00:08:41.220055 4922 generic.go:334] "Generic (PLEG): container finished" podID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerID="a5c4fd02a838b947bdc1c121d04ea83d5bd986c680b033ec708d5f06fe6cac42" exitCode=0 Sep 30 00:08:41 crc kubenswrapper[4922]: I0930 00:08:41.220146 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"96af43a5-5c5a-4d87-9860-dc79a4e0a54a","Type":"ContainerDied","Data":"a5c4fd02a838b947bdc1c121d04ea83d5bd986c680b033ec708d5f06fe6cac42"} Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.392188 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.519774 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-combined-ca-bundle\") pod \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.520037 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lch88\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-kube-api-access-lch88\") pod \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.520106 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-httpd-run\") pod \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.520138 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-config-data\") pod \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.520161 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-logs\") pod \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.520217 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-ceph\") pod \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.520435 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-scripts\") pod \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\" (UID: \"96af43a5-5c5a-4d87-9860-dc79a4e0a54a\") " Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.521321 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "96af43a5-5c5a-4d87-9860-dc79a4e0a54a" (UID: "96af43a5-5c5a-4d87-9860-dc79a4e0a54a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.521357 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-logs" (OuterVolumeSpecName: "logs") pod "96af43a5-5c5a-4d87-9860-dc79a4e0a54a" (UID: "96af43a5-5c5a-4d87-9860-dc79a4e0a54a"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.528506 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-scripts" (OuterVolumeSpecName: "scripts") pod "96af43a5-5c5a-4d87-9860-dc79a4e0a54a" (UID: "96af43a5-5c5a-4d87-9860-dc79a4e0a54a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.532148 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-kube-api-access-lch88" (OuterVolumeSpecName: "kube-api-access-lch88") pod "96af43a5-5c5a-4d87-9860-dc79a4e0a54a" (UID: "96af43a5-5c5a-4d87-9860-dc79a4e0a54a"). InnerVolumeSpecName "kube-api-access-lch88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.532190 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-ceph" (OuterVolumeSpecName: "ceph") pod "96af43a5-5c5a-4d87-9860-dc79a4e0a54a" (UID: "96af43a5-5c5a-4d87-9860-dc79a4e0a54a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.572704 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96af43a5-5c5a-4d87-9860-dc79a4e0a54a" (UID: "96af43a5-5c5a-4d87-9860-dc79a4e0a54a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.608628 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-config-data" (OuterVolumeSpecName: "config-data") pod "96af43a5-5c5a-4d87-9860-dc79a4e0a54a" (UID: "96af43a5-5c5a-4d87-9860-dc79a4e0a54a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.623287 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.623322 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.623334 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.623344 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.623354 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.623365 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:45 crc kubenswrapper[4922]: I0930 00:08:45.623377 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lch88\" (UniqueName: \"kubernetes.io/projected/96af43a5-5c5a-4d87-9860-dc79a4e0a54a-kube-api-access-lch88\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.291864 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d7d6565-424tm" event={"ID":"a67a0a27-0c64-462e-b3f5-388a0ec4478e","Type":"ContainerStarted","Data":"ef44254e5757bca3476123f62e9aad771394c593175c53f01c1afaa3d466cbc2"} Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.292198 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d7d6565-424tm" event={"ID":"a67a0a27-0c64-462e-b3f5-388a0ec4478e","Type":"ContainerStarted","Data":"c2fef38a011f431279d16cf8803354466e1edd824477f97958f77277716844cf"} Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.292115 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-644d7d6565-424tm" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon" containerID="cri-o://ef44254e5757bca3476123f62e9aad771394c593175c53f01c1afaa3d466cbc2" gracePeriod=30 Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.291924 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-644d7d6565-424tm" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon-log" containerID="cri-o://c2fef38a011f431279d16cf8803354466e1edd824477f97958f77277716844cf" gracePeriod=30 Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.303032 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749d8576cc-xm7k7" event={"ID":"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5","Type":"ContainerStarted","Data":"f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0"} Sep 30 00:08:46 crc kubenswrapper[4922]: 
I0930 00:08:46.303082 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749d8576cc-xm7k7" event={"ID":"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5","Type":"ContainerStarted","Data":"9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f"} Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.306529 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"2639f2fb-9a5f-4831-9302-3e2d6cd82d84","Type":"ContainerDied","Data":"c41213cf139d2d2a28cfce684711fea3e896333ab852626b70fd4d5f2ebc99de"} Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.306602 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c41213cf139d2d2a28cfce684711fea3e896333ab852626b70fd4d5f2ebc99de" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.314462 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.315494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"96af43a5-5c5a-4d87-9860-dc79a4e0a54a","Type":"ContainerDied","Data":"071b31e9d58eed79c9cd4c6a53bfe45252ace97afa91fbedb505586214821807"} Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.315583 4922 scope.go:117] "RemoveContainer" containerID="a5c4fd02a838b947bdc1c121d04ea83d5bd986c680b033ec708d5f06fe6cac42" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.324172 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-644d7d6565-424tm" podStartSLOduration=2.80898015 podStartE2EDuration="9.324154759s" podCreationTimestamp="2025-09-30 00:08:37 +0000 UTC" firstStartedPulling="2025-09-30 00:08:38.532584827 +0000 UTC m=+6122.842873640" lastFinishedPulling="2025-09-30 00:08:45.047759436 +0000 UTC m=+6129.358048249" observedRunningTime="2025-09-30 00:08:46.307576849 +0000 UTC m=+6130.617865702" watchObservedRunningTime="2025-09-30 00:08:46.324154759 +0000 UTC m=+6130.634443572" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.333880 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-749d8576cc-xm7k7" podStartSLOduration=2.423357206 podStartE2EDuration="8.333861648s" podCreationTimestamp="2025-09-30 00:08:38 +0000 UTC" firstStartedPulling="2025-09-30 00:08:39.172784331 +0000 UTC m=+6123.483073154" lastFinishedPulling="2025-09-30 00:08:45.083288783 +0000 UTC m=+6129.393577596" observedRunningTime="2025-09-30 00:08:46.331661624 +0000 UTC m=+6130.641950437" watchObservedRunningTime="2025-09-30 00:08:46.333861648 +0000 UTC m=+6130.644150451" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.335974 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8db4dbc5-pznzw" event={"ID":"4672fca9-dbff-4881-98be-58df50e49fcb","Type":"ContainerStarted","Data":"4a7b189fb16904438688a60a0be32235d4508701ef61c499971b9387cdf48875"} Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.336024 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8db4dbc5-pznzw" event={"ID":"4672fca9-dbff-4881-98be-58df50e49fcb","Type":"ContainerStarted","Data":"3a500f8647fac29dcb8f4e187fbbf4b43fa61eeae8d714722c10ed6162a6d8a1"} Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.349354 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.364714 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b8db4dbc5-pznzw" podStartSLOduration=2.989033871 podStartE2EDuration="9.364697659s" podCreationTimestamp="2025-09-30 00:08:37 +0000 UTC" firstStartedPulling="2025-09-30 00:08:38.678855875 +0000 UTC m=+6122.989144688" lastFinishedPulling="2025-09-30 00:08:45.054519663 +0000 UTC m=+6129.364808476" observedRunningTime="2025-09-30 00:08:46.358208299 +0000 UTC m=+6130.668497112" watchObservedRunningTime="2025-09-30 00:08:46.364697659 +0000 UTC m=+6130.674986472" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.367864 4922 scope.go:117] "RemoveContainer" containerID="ce215a73f76762f9222329879baa9c8056c4ab48717537ec703e51021124c4c1" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.413637 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.453797 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.455575 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-config-data\") pod \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.455632 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-logs\") pod \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.455706 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsdqh\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-kube-api-access-wsdqh\") pod \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.455753 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-scripts\") pod \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.455815 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-ceph\") pod \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.455866 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-combined-ca-bundle\") pod \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.455923 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-httpd-run\") pod 
\"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\" (UID: \"2639f2fb-9a5f-4831-9302-3e2d6cd82d84\") " Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.461183 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2639f2fb-9a5f-4831-9302-3e2d6cd82d84" (UID: "2639f2fb-9a5f-4831-9302-3e2d6cd82d84"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.463061 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-kube-api-access-wsdqh" (OuterVolumeSpecName: "kube-api-access-wsdqh") pod "2639f2fb-9a5f-4831-9302-3e2d6cd82d84" (UID: "2639f2fb-9a5f-4831-9302-3e2d6cd82d84"). InnerVolumeSpecName "kube-api-access-wsdqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.463274 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-logs" (OuterVolumeSpecName: "logs") pod "2639f2fb-9a5f-4831-9302-3e2d6cd82d84" (UID: "2639f2fb-9a5f-4831-9302-3e2d6cd82d84"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.466609 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 00:08:46 crc kubenswrapper[4922]: E0930 00:08:46.467005 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-httpd" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.467021 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-httpd" Sep 30 00:08:46 crc kubenswrapper[4922]: E0930 00:08:46.467032 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-httpd" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.467038 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-httpd" Sep 30 00:08:46 crc kubenswrapper[4922]: E0930 00:08:46.467064 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-log" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.467070 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-log" Sep 30 00:08:46 crc kubenswrapper[4922]: E0930 00:08:46.467084 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-log" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.467089 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-log" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.474309 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-httpd" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.474339 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-log" Sep 30 00:08:46 crc kubenswrapper[4922]: 
I0930 00:08:46.474349 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" containerName="glance-httpd" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.474460 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" containerName="glance-log" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.476489 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.477248 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-scripts" (OuterVolumeSpecName: "scripts") pod "2639f2fb-9a5f-4831-9302-3e2d6cd82d84" (UID: "2639f2fb-9a5f-4831-9302-3e2d6cd82d84"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.478114 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.479219 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-ceph" (OuterVolumeSpecName: "ceph") pod "2639f2fb-9a5f-4831-9302-3e2d6cd82d84" (UID: "2639f2fb-9a5f-4831-9302-3e2d6cd82d84"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.483911 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.510196 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2639f2fb-9a5f-4831-9302-3e2d6cd82d84" (UID: "2639f2fb-9a5f-4831-9302-3e2d6cd82d84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.542884 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-config-data" (OuterVolumeSpecName: "config-data") pod "2639f2fb-9a5f-4831-9302-3e2d6cd82d84" (UID: "2639f2fb-9a5f-4831-9302-3e2d6cd82d84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.557567 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.557805 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.557904 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh8fn\" (UniqueName: \"kubernetes.io/projected/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-kube-api-access-xh8fn\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558033 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558107 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558201 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558273 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558416 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558493 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsdqh\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-kube-api-access-wsdqh\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558587 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558677 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558751 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558822 4922 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.558891 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2639f2fb-9a5f-4831-9302-3e2d6cd82d84-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.661828 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.662171 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.663082 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh8fn\" (UniqueName: \"kubernetes.io/projected/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-kube-api-access-xh8fn\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.663319 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.662467 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.663750 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.663961 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.664098 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.668810 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.668810 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.668990 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-ceph\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.671626 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-logs\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.672578 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.680067 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh8fn\" (UniqueName: \"kubernetes.io/projected/f9f3333d-2d8b-4d71-8b56-4685c20b0d82-kube-api-access-xh8fn\") pod \"glance-default-internal-api-0\" (UID: \"f9f3333d-2d8b-4d71-8b56-4685c20b0d82\") " pod="openstack/glance-default-internal-api-0" Sep 30 00:08:46 crc kubenswrapper[4922]: I0930 00:08:46.888494 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.351318 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.393901 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.401599 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.424344 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.432329 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.434845 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.440879 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.553681 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 00:08:47 crc kubenswrapper[4922]: W0930 00:08:47.554920 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9f3333d_2d8b_4d71_8b56_4685c20b0d82.slice/crio-1ab480240dd3ff435558e2f85e0dbc2c6c9752f46d152fe17981497aca74fdaf WatchSource:0}: Error finding container 1ab480240dd3ff435558e2f85e0dbc2c6c9752f46d152fe17981497aca74fdaf: Status 404 returned error can't find the container with id 1ab480240dd3ff435558e2f85e0dbc2c6c9752f46d152fe17981497aca74fdaf Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.591326 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba3fa718-9def-4884-a31f-0fb295b35c53-logs\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.591437 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba3fa718-9def-4884-a31f-0fb295b35c53-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.591711 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ba3fa718-9def-4884-a31f-0fb295b35c53-ceph\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.591808 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-config-data\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.591845 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.591893 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-scripts\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.591919 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzxhq\" (UniqueName: \"kubernetes.io/projected/ba3fa718-9def-4884-a31f-0fb295b35c53-kube-api-access-rzxhq\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.693532 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ba3fa718-9def-4884-a31f-0fb295b35c53-ceph\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.693593 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-config-data\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.693619 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.693654 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-scripts\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.693673 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzxhq\" (UniqueName: \"kubernetes.io/projected/ba3fa718-9def-4884-a31f-0fb295b35c53-kube-api-access-rzxhq\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.693708 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba3fa718-9def-4884-a31f-0fb295b35c53-logs\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.693751 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/ba3fa718-9def-4884-a31f-0fb295b35c53-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.694293 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba3fa718-9def-4884-a31f-0fb295b35c53-logs\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.694304 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ba3fa718-9def-4884-a31f-0fb295b35c53-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.698946 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-scripts\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.699590 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/ba3fa718-9def-4884-a31f-0fb295b35c53-ceph\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.699750 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-config-data\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.701833 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba3fa718-9def-4884-a31f-0fb295b35c53-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.715080 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzxhq\" (UniqueName: \"kubernetes.io/projected/ba3fa718-9def-4884-a31f-0fb295b35c53-kube-api-access-rzxhq\") pod \"glance-default-external-api-0\" (UID: \"ba3fa718-9def-4884-a31f-0fb295b35c53\") " pod="openstack/glance-default-external-api-0" Sep 30 00:08:47 crc kubenswrapper[4922]: I0930 00:08:47.758589 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.004688 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.126873 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.127292 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.304433 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.375577 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba3fa718-9def-4884-a31f-0fb295b35c53","Type":"ContainerStarted","Data":"195157a74d841a31fb1fa3ae4a38fb8a2e6a9ccbd3216422cd759ff99faa80e4"} Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.391967 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9f3333d-2d8b-4d71-8b56-4685c20b0d82","Type":"ContainerStarted","Data":"7794f8c9a1482e5117829d655bb7b686db7437e23947e9b6c7cf8e7171b40ff7"} Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.392006 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9f3333d-2d8b-4d71-8b56-4685c20b0d82","Type":"ContainerStarted","Data":"1ab480240dd3ff435558e2f85e0dbc2c6c9752f46d152fe17981497aca74fdaf"} Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.444084 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2639f2fb-9a5f-4831-9302-3e2d6cd82d84" path="/var/lib/kubelet/pods/2639f2fb-9a5f-4831-9302-3e2d6cd82d84/volumes" Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.445884 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96af43a5-5c5a-4d87-9860-dc79a4e0a54a" path="/var/lib/kubelet/pods/96af43a5-5c5a-4d87-9860-dc79a4e0a54a/volumes" Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.721467 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:48 crc kubenswrapper[4922]: I0930 00:08:48.721527 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:08:49 crc kubenswrapper[4922]: I0930 00:08:49.403427 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba3fa718-9def-4884-a31f-0fb295b35c53","Type":"ContainerStarted","Data":"ef7951a9e7b4bcc7cd07e80f0fc9a749c8e501b78305f95f70b720611ffefd60"} Sep 30 00:08:49 crc kubenswrapper[4922]: I0930 00:08:49.407318 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f9f3333d-2d8b-4d71-8b56-4685c20b0d82","Type":"ContainerStarted","Data":"12d743414f76930615bd07f016f9c2749cde38a97c02ff3024084721367601e2"} Sep 30 00:08:49 crc kubenswrapper[4922]: I0930 00:08:49.453774 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.453750176 podStartE2EDuration="3.453750176s" podCreationTimestamp="2025-09-30 00:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:08:49.433654461 +0000 UTC m=+6133.743943274" watchObservedRunningTime="2025-09-30 00:08:49.453750176 +0000 UTC m=+6133.764038989" Sep 30 00:08:50 crc kubenswrapper[4922]: I0930 00:08:50.416274 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ba3fa718-9def-4884-a31f-0fb295b35c53","Type":"ContainerStarted","Data":"06cc538c6fbdd1b517d5bdd980c9c2d5666bb52bc57a5144256a3ef7a63c21af"} Sep 30 00:08:50 crc kubenswrapper[4922]: I0930 00:08:50.445463 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.445440134 podStartE2EDuration="3.445440134s" podCreationTimestamp="2025-09-30 00:08:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:08:50.439942288 +0000 UTC m=+6134.750231181" watchObservedRunningTime="2025-09-30 00:08:50.445440134 +0000 UTC m=+6134.755728987" Sep 30 00:08:51 crc kubenswrapper[4922]: I0930 00:08:51.421716 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:08:51 crc kubenswrapper[4922]: E0930 00:08:51.422303 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:08:54 crc kubenswrapper[4922]: I0930 00:08:54.092869 4922 scope.go:117] "RemoveContainer" containerID="033fb2006310cf3a957f70939d53a0e4dc5ae89cf462d5ff331e6fdcb52f6110" Sep 30 00:08:54 crc kubenswrapper[4922]: I0930 00:08:54.122870 4922 scope.go:117] "RemoveContainer" containerID="084e370aa0a43bd4d2727702dbb0a175fab65b9cbae2db19fc2567207a88880c" Sep 30 00:08:54 crc kubenswrapper[4922]: I0930 00:08:54.225183 4922 scope.go:117] "RemoveContainer" containerID="d0dbe8ea80b2c96687b2aaedecd5b006bb2c4ae1ad09b94440abe47c5645ac3a" Sep 30 00:08:54 crc kubenswrapper[4922]: I0930 00:08:54.255928 4922 scope.go:117] "RemoveContainer" containerID="6813631fc637aecd46f709a581af8bbab2755c6296c396d7309e229fb02ef2ce" Sep 30 00:08:54 crc kubenswrapper[4922]: I0930 00:08:54.281694 4922 scope.go:117] "RemoveContainer" containerID="1ac76f79b13f78a2c1fee8b2b222d54e1edd1598b39125b838a73bca6494dec5" Sep 30 00:08:56 crc kubenswrapper[4922]: I0930 00:08:56.890185 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:56 crc kubenswrapper[4922]: I0930 00:08:56.890680 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:56 crc kubenswrapper[4922]: I0930 00:08:56.955854 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:56 crc kubenswrapper[4922]: I0930 00:08:56.957361 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:57 crc kubenswrapper[4922]: I0930 00:08:57.494985 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/glance-default-internal-api-0" Sep 30 00:08:57 crc kubenswrapper[4922]: I0930 00:08:57.495041 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:57 crc kubenswrapper[4922]: I0930 00:08:57.759742 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 00:08:57 crc kubenswrapper[4922]: I0930 00:08:57.760531 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 00:08:57 crc kubenswrapper[4922]: I0930 00:08:57.787380 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 00:08:57 crc kubenswrapper[4922]: I0930 00:08:57.802819 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 00:08:58 crc kubenswrapper[4922]: I0930 00:08:58.130332 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b8db4dbc5-pznzw" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Sep 30 00:08:58 crc kubenswrapper[4922]: I0930 00:08:58.502996 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 00:08:58 crc kubenswrapper[4922]: I0930 00:08:58.503053 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 00:08:58 crc kubenswrapper[4922]: I0930 00:08:58.723964 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749d8576cc-xm7k7" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Sep 30 00:08:59 crc kubenswrapper[4922]: I0930 00:08:59.794015 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 00:08:59 crc kubenswrapper[4922]: I0930 00:08:59.794146 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 00:08:59 crc kubenswrapper[4922]: I0930 00:08:59.796494 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 00:09:00 crc kubenswrapper[4922]: I0930 00:09:00.513193 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 00:09:00 crc kubenswrapper[4922]: I0930 00:09:00.519613 4922 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 00:09:01 crc kubenswrapper[4922]: I0930 00:09:01.229789 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 00:09:06 crc kubenswrapper[4922]: I0930 00:09:06.429976 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:09:06 crc kubenswrapper[4922]: E0930 00:09:06.430833 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.053608 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-tp6hx"] Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.064126 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-tp6hx"] Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.209938 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-phsft"] Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.212862 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.222264 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-phsft"] Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.362531 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-utilities\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.362602 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm7bl\" (UniqueName: \"kubernetes.io/projected/f616e79d-68ab-4d2c-b660-5c1389323f1c-kube-api-access-wm7bl\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.362943 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-catalog-content\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.465232 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-utilities\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.465784 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm7bl\" (UniqueName: \"kubernetes.io/projected/f616e79d-68ab-4d2c-b660-5c1389323f1c-kube-api-access-wm7bl\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.465715 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-utilities\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc 
kubenswrapper[4922]: I0930 00:09:09.466312 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-catalog-content\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.466625 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-catalog-content\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.511203 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm7bl\" (UniqueName: \"kubernetes.io/projected/f616e79d-68ab-4d2c-b660-5c1389323f1c-kube-api-access-wm7bl\") pod \"community-operators-phsft\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:09 crc kubenswrapper[4922]: I0930 00:09:09.554354 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.024488 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.075280 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-phsft"] Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.438453 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="784405e9-bb71-4647-9ef8-143eef93e57b" path="/var/lib/kubelet/pods/784405e9-bb71-4647-9ef8-143eef93e57b/volumes" Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.520315 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.629283 4922 generic.go:334] "Generic (PLEG): container finished" podID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerID="9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f" exitCode=0 Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.629330 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phsft" event={"ID":"f616e79d-68ab-4d2c-b660-5c1389323f1c","Type":"ContainerDied","Data":"9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f"} Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.629359 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phsft" event={"ID":"f616e79d-68ab-4d2c-b660-5c1389323f1c","Type":"ContainerStarted","Data":"72b3d00329673c7f028034275c2f82cb990aa8b11677860c88089acea4cfa474"} Sep 30 00:09:10 crc kubenswrapper[4922]: I0930 00:09:10.631492 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:09:11 crc kubenswrapper[4922]: I0930 00:09:11.641064 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phsft" event={"ID":"f616e79d-68ab-4d2c-b660-5c1389323f1c","Type":"ContainerStarted","Data":"991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b"} Sep 30 00:09:11 crc 
kubenswrapper[4922]: I0930 00:09:11.670069 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:09:12 crc kubenswrapper[4922]: I0930 00:09:12.196039 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:09:12 crc kubenswrapper[4922]: I0930 00:09:12.259747 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b8db4dbc5-pznzw"] Sep 30 00:09:12 crc kubenswrapper[4922]: I0930 00:09:12.653717 4922 generic.go:334] "Generic (PLEG): container finished" podID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerID="991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b" exitCode=0 Sep 30 00:09:12 crc kubenswrapper[4922]: I0930 00:09:12.653907 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b8db4dbc5-pznzw" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon-log" containerID="cri-o://3a500f8647fac29dcb8f4e187fbbf4b43fa61eeae8d714722c10ed6162a6d8a1" gracePeriod=30 Sep 30 00:09:12 crc kubenswrapper[4922]: I0930 00:09:12.655169 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phsft" event={"ID":"f616e79d-68ab-4d2c-b660-5c1389323f1c","Type":"ContainerDied","Data":"991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b"} Sep 30 00:09:12 crc kubenswrapper[4922]: I0930 00:09:12.655646 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b8db4dbc5-pznzw" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" containerID="cri-o://4a7b189fb16904438688a60a0be32235d4508701ef61c499971b9387cdf48875" gracePeriod=30 Sep 30 00:09:13 crc kubenswrapper[4922]: I0930 00:09:13.672793 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phsft" event={"ID":"f616e79d-68ab-4d2c-b660-5c1389323f1c","Type":"ContainerStarted","Data":"a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f"} Sep 30 00:09:13 crc kubenswrapper[4922]: I0930 00:09:13.708791 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-phsft" podStartSLOduration=2.151771115 podStartE2EDuration="4.708758534s" podCreationTimestamp="2025-09-30 00:09:09 +0000 UTC" firstStartedPulling="2025-09-30 00:09:10.631202195 +0000 UTC m=+6154.941491008" lastFinishedPulling="2025-09-30 00:09:13.188189614 +0000 UTC m=+6157.498478427" observedRunningTime="2025-09-30 00:09:13.701179217 +0000 UTC m=+6158.011468040" watchObservedRunningTime="2025-09-30 00:09:13.708758534 +0000 UTC m=+6158.019047387" Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.703922 4922 generic.go:334] "Generic (PLEG): container finished" podID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerID="ef44254e5757bca3476123f62e9aad771394c593175c53f01c1afaa3d466cbc2" exitCode=137 Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.704360 4922 generic.go:334] "Generic (PLEG): container finished" podID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerID="c2fef38a011f431279d16cf8803354466e1edd824477f97958f77277716844cf" exitCode=137 Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.704021 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d7d6565-424tm" 
event={"ID":"a67a0a27-0c64-462e-b3f5-388a0ec4478e","Type":"ContainerDied","Data":"ef44254e5757bca3476123f62e9aad771394c593175c53f01c1afaa3d466cbc2"} Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.704431 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d7d6565-424tm" event={"ID":"a67a0a27-0c64-462e-b3f5-388a0ec4478e","Type":"ContainerDied","Data":"c2fef38a011f431279d16cf8803354466e1edd824477f97958f77277716844cf"} Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.707352 4922 generic.go:334] "Generic (PLEG): container finished" podID="4672fca9-dbff-4881-98be-58df50e49fcb" containerID="4a7b189fb16904438688a60a0be32235d4508701ef61c499971b9387cdf48875" exitCode=0 Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.707381 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8db4dbc5-pznzw" event={"ID":"4672fca9-dbff-4881-98be-58df50e49fcb","Type":"ContainerDied","Data":"4a7b189fb16904438688a60a0be32235d4508701ef61c499971b9387cdf48875"} Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.820153 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.936437 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-scripts\") pod \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.936502 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcnbv\" (UniqueName: \"kubernetes.io/projected/a67a0a27-0c64-462e-b3f5-388a0ec4478e-kube-api-access-lcnbv\") pod \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.936530 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-config-data\") pod \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.936581 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a67a0a27-0c64-462e-b3f5-388a0ec4478e-logs\") pod \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.936672 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a67a0a27-0c64-462e-b3f5-388a0ec4478e-horizon-secret-key\") pod \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\" (UID: \"a67a0a27-0c64-462e-b3f5-388a0ec4478e\") " Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.937046 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a67a0a27-0c64-462e-b3f5-388a0ec4478e-logs" (OuterVolumeSpecName: "logs") pod "a67a0a27-0c64-462e-b3f5-388a0ec4478e" (UID: "a67a0a27-0c64-462e-b3f5-388a0ec4478e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.937495 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a67a0a27-0c64-462e-b3f5-388a0ec4478e-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.943032 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a67a0a27-0c64-462e-b3f5-388a0ec4478e-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a67a0a27-0c64-462e-b3f5-388a0ec4478e" (UID: "a67a0a27-0c64-462e-b3f5-388a0ec4478e"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.943648 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a67a0a27-0c64-462e-b3f5-388a0ec4478e-kube-api-access-lcnbv" (OuterVolumeSpecName: "kube-api-access-lcnbv") pod "a67a0a27-0c64-462e-b3f5-388a0ec4478e" (UID: "a67a0a27-0c64-462e-b3f5-388a0ec4478e"). InnerVolumeSpecName "kube-api-access-lcnbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.962284 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-scripts" (OuterVolumeSpecName: "scripts") pod "a67a0a27-0c64-462e-b3f5-388a0ec4478e" (UID: "a67a0a27-0c64-462e-b3f5-388a0ec4478e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:16 crc kubenswrapper[4922]: I0930 00:09:16.974049 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-config-data" (OuterVolumeSpecName: "config-data") pod "a67a0a27-0c64-462e-b3f5-388a0ec4478e" (UID: "a67a0a27-0c64-462e-b3f5-388a0ec4478e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.039630 4922 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a67a0a27-0c64-462e-b3f5-388a0ec4478e-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.039672 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.039685 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcnbv\" (UniqueName: \"kubernetes.io/projected/a67a0a27-0c64-462e-b3f5-388a0ec4478e-kube-api-access-lcnbv\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.039698 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a67a0a27-0c64-462e-b3f5-388a0ec4478e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.721026 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d7d6565-424tm" event={"ID":"a67a0a27-0c64-462e-b3f5-388a0ec4478e","Type":"ContainerDied","Data":"dacbe4d8c5674bb38915661f295fe8e38bb81aa204a48a344df9d3d71c243579"} Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.721106 4922 scope.go:117] "RemoveContainer" containerID="ef44254e5757bca3476123f62e9aad771394c593175c53f01c1afaa3d466cbc2" Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.721152 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-644d7d6565-424tm" Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.788716 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-644d7d6565-424tm"] Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.802105 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-644d7d6565-424tm"] Sep 30 00:09:17 crc kubenswrapper[4922]: I0930 00:09:17.960848 4922 scope.go:117] "RemoveContainer" containerID="c2fef38a011f431279d16cf8803354466e1edd824477f97958f77277716844cf" Sep 30 00:09:18 crc kubenswrapper[4922]: I0930 00:09:18.127966 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b8db4dbc5-pznzw" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Sep 30 00:09:18 crc kubenswrapper[4922]: I0930 00:09:18.435614 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:09:18 crc kubenswrapper[4922]: E0930 00:09:18.436000 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:09:18 crc kubenswrapper[4922]: I0930 00:09:18.447460 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" 
path="/var/lib/kubelet/pods/a67a0a27-0c64-462e-b3f5-388a0ec4478e/volumes" Sep 30 00:09:19 crc kubenswrapper[4922]: I0930 00:09:19.043763 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-47b4-account-create-vljxb"] Sep 30 00:09:19 crc kubenswrapper[4922]: I0930 00:09:19.060188 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-47b4-account-create-vljxb"] Sep 30 00:09:19 crc kubenswrapper[4922]: I0930 00:09:19.555277 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:19 crc kubenswrapper[4922]: I0930 00:09:19.555441 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:19 crc kubenswrapper[4922]: I0930 00:09:19.622423 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:19 crc kubenswrapper[4922]: I0930 00:09:19.814732 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:19 crc kubenswrapper[4922]: I0930 00:09:19.884754 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-phsft"] Sep 30 00:09:20 crc kubenswrapper[4922]: I0930 00:09:20.437282 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="992f8cd6-9e7f-49d9-acfc-71e1c077379d" path="/var/lib/kubelet/pods/992f8cd6-9e7f-49d9-acfc-71e1c077379d/volumes" Sep 30 00:09:21 crc kubenswrapper[4922]: I0930 00:09:21.770011 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-phsft" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="registry-server" containerID="cri-o://a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f" gracePeriod=2 Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.280134 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.401140 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-utilities\") pod \"f616e79d-68ab-4d2c-b660-5c1389323f1c\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.401264 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-catalog-content\") pod \"f616e79d-68ab-4d2c-b660-5c1389323f1c\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.401301 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm7bl\" (UniqueName: \"kubernetes.io/projected/f616e79d-68ab-4d2c-b660-5c1389323f1c-kube-api-access-wm7bl\") pod \"f616e79d-68ab-4d2c-b660-5c1389323f1c\" (UID: \"f616e79d-68ab-4d2c-b660-5c1389323f1c\") " Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.402021 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-utilities" (OuterVolumeSpecName: "utilities") pod "f616e79d-68ab-4d2c-b660-5c1389323f1c" (UID: "f616e79d-68ab-4d2c-b660-5c1389323f1c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.407632 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f616e79d-68ab-4d2c-b660-5c1389323f1c-kube-api-access-wm7bl" (OuterVolumeSpecName: "kube-api-access-wm7bl") pod "f616e79d-68ab-4d2c-b660-5c1389323f1c" (UID: "f616e79d-68ab-4d2c-b660-5c1389323f1c"). InnerVolumeSpecName "kube-api-access-wm7bl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.458875 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f616e79d-68ab-4d2c-b660-5c1389323f1c" (UID: "f616e79d-68ab-4d2c-b660-5c1389323f1c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.504995 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.505036 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f616e79d-68ab-4d2c-b660-5c1389323f1c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.505049 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm7bl\" (UniqueName: \"kubernetes.io/projected/f616e79d-68ab-4d2c-b660-5c1389323f1c-kube-api-access-wm7bl\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.785366 4922 generic.go:334] "Generic (PLEG): container finished" podID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerID="a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f" exitCode=0 Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.785450 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-phsft" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.785448 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phsft" event={"ID":"f616e79d-68ab-4d2c-b660-5c1389323f1c","Type":"ContainerDied","Data":"a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f"} Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.785991 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-phsft" event={"ID":"f616e79d-68ab-4d2c-b660-5c1389323f1c","Type":"ContainerDied","Data":"72b3d00329673c7f028034275c2f82cb990aa8b11677860c88089acea4cfa474"} Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.786023 4922 scope.go:117] "RemoveContainer" containerID="a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.815977 4922 scope.go:117] "RemoveContainer" containerID="991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.849967 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-phsft"] Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.859643 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-phsft"] Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.864870 4922 scope.go:117] "RemoveContainer" containerID="9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.905656 4922 scope.go:117] "RemoveContainer" containerID="a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f" Sep 30 00:09:22 crc kubenswrapper[4922]: E0930 00:09:22.906682 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f\": container with ID starting with a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f not found: ID does not exist" containerID="a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.906760 
4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f"} err="failed to get container status \"a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f\": rpc error: code = NotFound desc = could not find container \"a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f\": container with ID starting with a7a752f4faaad1408889809dcc15934efa5b2e2569c25fe71060733ae1d8868f not found: ID does not exist" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.906798 4922 scope.go:117] "RemoveContainer" containerID="991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b" Sep 30 00:09:22 crc kubenswrapper[4922]: E0930 00:09:22.907263 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b\": container with ID starting with 991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b not found: ID does not exist" containerID="991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.907335 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b"} err="failed to get container status \"991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b\": rpc error: code = NotFound desc = could not find container \"991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b\": container with ID starting with 991b8cd9d10f28f614b9e8e6ddbed4263abf6b8ea78d4dca8fa455b4a4b4023b not found: ID does not exist" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.907379 4922 scope.go:117] "RemoveContainer" containerID="9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f" Sep 30 00:09:22 crc kubenswrapper[4922]: E0930 00:09:22.908070 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f\": container with ID starting with 9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f not found: ID does not exist" containerID="9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f" Sep 30 00:09:22 crc kubenswrapper[4922]: I0930 00:09:22.908107 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f"} err="failed to get container status \"9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f\": rpc error: code = NotFound desc = could not find container \"9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f\": container with ID starting with 9fa7cc6f609e2d453c05ed6567481286903a358d42fcc75d442aa7744c48354f not found: ID does not exist" Sep 30 00:09:24 crc kubenswrapper[4922]: I0930 00:09:24.437278 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" path="/var/lib/kubelet/pods/f616e79d-68ab-4d2c-b660-5c1389323f1c/volumes" Sep 30 00:09:26 crc kubenswrapper[4922]: I0930 00:09:26.041315 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-6xwbm"] Sep 30 00:09:26 crc kubenswrapper[4922]: I0930 00:09:26.049993 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-db-sync-6xwbm"] Sep 30 00:09:26 crc kubenswrapper[4922]: I0930 00:09:26.457421 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2b532ba-4446-49c0-aa22-263f4ddf0a61" path="/var/lib/kubelet/pods/d2b532ba-4446-49c0-aa22-263f4ddf0a61/volumes" Sep 30 00:09:28 crc kubenswrapper[4922]: I0930 00:09:28.127347 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b8db4dbc5-pznzw" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Sep 30 00:09:32 crc kubenswrapper[4922]: I0930 00:09:32.423489 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:09:32 crc kubenswrapper[4922]: E0930 00:09:32.425338 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:09:38 crc kubenswrapper[4922]: I0930 00:09:38.127305 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b8db4dbc5-pznzw" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Sep 30 00:09:38 crc kubenswrapper[4922]: I0930 00:09:38.128109 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.047744 4922 generic.go:334] "Generic (PLEG): container finished" podID="4672fca9-dbff-4881-98be-58df50e49fcb" containerID="3a500f8647fac29dcb8f4e187fbbf4b43fa61eeae8d714722c10ed6162a6d8a1" exitCode=137 Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.047789 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8db4dbc5-pznzw" event={"ID":"4672fca9-dbff-4881-98be-58df50e49fcb","Type":"ContainerDied","Data":"3a500f8647fac29dcb8f4e187fbbf4b43fa61eeae8d714722c10ed6162a6d8a1"} Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.048474 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b8db4dbc5-pznzw" event={"ID":"4672fca9-dbff-4881-98be-58df50e49fcb","Type":"ContainerDied","Data":"0e4cb0dac79b548987675a2c23864e2e527acb44e2133d4df95715ba57ac7f9a"} Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.048491 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e4cb0dac79b548987675a2c23864e2e527acb44e2133d4df95715ba57ac7f9a" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.104157 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.204370 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-scripts\") pod \"4672fca9-dbff-4881-98be-58df50e49fcb\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.204478 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7w8xs\" (UniqueName: \"kubernetes.io/projected/4672fca9-dbff-4881-98be-58df50e49fcb-kube-api-access-7w8xs\") pod \"4672fca9-dbff-4881-98be-58df50e49fcb\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.204597 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4672fca9-dbff-4881-98be-58df50e49fcb-logs\") pod \"4672fca9-dbff-4881-98be-58df50e49fcb\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.204648 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4672fca9-dbff-4881-98be-58df50e49fcb-horizon-secret-key\") pod \"4672fca9-dbff-4881-98be-58df50e49fcb\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.204723 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-config-data\") pod \"4672fca9-dbff-4881-98be-58df50e49fcb\" (UID: \"4672fca9-dbff-4881-98be-58df50e49fcb\") " Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.204985 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4672fca9-dbff-4881-98be-58df50e49fcb-logs" (OuterVolumeSpecName: "logs") pod "4672fca9-dbff-4881-98be-58df50e49fcb" (UID: "4672fca9-dbff-4881-98be-58df50e49fcb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.205316 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4672fca9-dbff-4881-98be-58df50e49fcb-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.210677 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4672fca9-dbff-4881-98be-58df50e49fcb-kube-api-access-7w8xs" (OuterVolumeSpecName: "kube-api-access-7w8xs") pod "4672fca9-dbff-4881-98be-58df50e49fcb" (UID: "4672fca9-dbff-4881-98be-58df50e49fcb"). InnerVolumeSpecName "kube-api-access-7w8xs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.214498 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4672fca9-dbff-4881-98be-58df50e49fcb-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "4672fca9-dbff-4881-98be-58df50e49fcb" (UID: "4672fca9-dbff-4881-98be-58df50e49fcb"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.246903 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-config-data" (OuterVolumeSpecName: "config-data") pod "4672fca9-dbff-4881-98be-58df50e49fcb" (UID: "4672fca9-dbff-4881-98be-58df50e49fcb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.249492 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-scripts" (OuterVolumeSpecName: "scripts") pod "4672fca9-dbff-4881-98be-58df50e49fcb" (UID: "4672fca9-dbff-4881-98be-58df50e49fcb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.307727 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.307777 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7w8xs\" (UniqueName: \"kubernetes.io/projected/4672fca9-dbff-4881-98be-58df50e49fcb-kube-api-access-7w8xs\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.307799 4922 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4672fca9-dbff-4881-98be-58df50e49fcb-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:43 crc kubenswrapper[4922]: I0930 00:09:43.307816 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4672fca9-dbff-4881-98be-58df50e49fcb-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:44 crc kubenswrapper[4922]: I0930 00:09:44.057827 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7b8db4dbc5-pznzw" Sep 30 00:09:44 crc kubenswrapper[4922]: I0930 00:09:44.101098 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b8db4dbc5-pznzw"] Sep 30 00:09:44 crc kubenswrapper[4922]: I0930 00:09:44.108678 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b8db4dbc5-pznzw"] Sep 30 00:09:44 crc kubenswrapper[4922]: I0930 00:09:44.442604 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" path="/var/lib/kubelet/pods/4672fca9-dbff-4881-98be-58df50e49fcb/volumes" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.428029 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.428582 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467193 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-648ffd58c7-rcgrg"] Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.467604 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467626 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon" Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.467641 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="registry-server" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467648 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="registry-server" Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.467662 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon-log" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467668 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon-log" Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.467689 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467695 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.467707 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon-log" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467712 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon-log" Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.467721 4922 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="extract-content" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467727 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="extract-content" Sep 30 00:09:46 crc kubenswrapper[4922]: E0930 00:09:46.467737 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="extract-utilities" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467745 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="extract-utilities" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467924 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467941 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f616e79d-68ab-4d2c-b660-5c1389323f1c" containerName="registry-server" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467950 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467958 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a67a0a27-0c64-462e-b3f5-388a0ec4478e" containerName="horizon-log" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.467970 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4672fca9-dbff-4881-98be-58df50e49fcb" containerName="horizon-log" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.468956 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.478099 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-648ffd58c7-rcgrg"] Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.580451 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c47bee4-1616-4b27-9980-79628af51f6c-logs\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.580523 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6c47bee4-1616-4b27-9980-79628af51f6c-config-data\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.580696 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x285\" (UniqueName: \"kubernetes.io/projected/6c47bee4-1616-4b27-9980-79628af51f6c-kube-api-access-2x285\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.580814 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c47bee4-1616-4b27-9980-79628af51f6c-scripts\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " 
pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.580919 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6c47bee4-1616-4b27-9980-79628af51f6c-horizon-secret-key\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.682907 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c47bee4-1616-4b27-9980-79628af51f6c-logs\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.682981 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6c47bee4-1616-4b27-9980-79628af51f6c-config-data\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.683086 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x285\" (UniqueName: \"kubernetes.io/projected/6c47bee4-1616-4b27-9980-79628af51f6c-kube-api-access-2x285\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.683142 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c47bee4-1616-4b27-9980-79628af51f6c-scripts\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.683254 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6c47bee4-1616-4b27-9980-79628af51f6c-horizon-secret-key\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.683444 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c47bee4-1616-4b27-9980-79628af51f6c-logs\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.684304 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6c47bee4-1616-4b27-9980-79628af51f6c-scripts\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.684985 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6c47bee4-1616-4b27-9980-79628af51f6c-config-data\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.692094 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6c47bee4-1616-4b27-9980-79628af51f6c-horizon-secret-key\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.713690 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x285\" (UniqueName: \"kubernetes.io/projected/6c47bee4-1616-4b27-9980-79628af51f6c-kube-api-access-2x285\") pod \"horizon-648ffd58c7-rcgrg\" (UID: \"6c47bee4-1616-4b27-9980-79628af51f6c\") " pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:46 crc kubenswrapper[4922]: I0930 00:09:46.784640 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:47 crc kubenswrapper[4922]: W0930 00:09:47.293347 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c47bee4_1616_4b27_9980_79628af51f6c.slice/crio-3424d8e3e0a1ff4474ac8c12b8359ca80bd6e8b76b5fb77bcbb777bcc7ac30c6 WatchSource:0}: Error finding container 3424d8e3e0a1ff4474ac8c12b8359ca80bd6e8b76b5fb77bcbb777bcc7ac30c6: Status 404 returned error can't find the container with id 3424d8e3e0a1ff4474ac8c12b8359ca80bd6e8b76b5fb77bcbb777bcc7ac30c6 Sep 30 00:09:47 crc kubenswrapper[4922]: I0930 00:09:47.294232 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-648ffd58c7-rcgrg"] Sep 30 00:09:47 crc kubenswrapper[4922]: I0930 00:09:47.848715 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-nwltr"] Sep 30 00:09:47 crc kubenswrapper[4922]: I0930 00:09:47.849932 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-nwltr" Sep 30 00:09:47 crc kubenswrapper[4922]: I0930 00:09:47.860311 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-nwltr"] Sep 30 00:09:47 crc kubenswrapper[4922]: I0930 00:09:47.913732 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqjl7\" (UniqueName: \"kubernetes.io/projected/cb45e14c-f6a0-4fc6-b74d-c3f44cda04df-kube-api-access-hqjl7\") pod \"heat-db-create-nwltr\" (UID: \"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df\") " pod="openstack/heat-db-create-nwltr" Sep 30 00:09:48 crc kubenswrapper[4922]: I0930 00:09:48.015584 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqjl7\" (UniqueName: \"kubernetes.io/projected/cb45e14c-f6a0-4fc6-b74d-c3f44cda04df-kube-api-access-hqjl7\") pod \"heat-db-create-nwltr\" (UID: \"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df\") " pod="openstack/heat-db-create-nwltr" Sep 30 00:09:48 crc kubenswrapper[4922]: I0930 00:09:48.034571 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqjl7\" (UniqueName: \"kubernetes.io/projected/cb45e14c-f6a0-4fc6-b74d-c3f44cda04df-kube-api-access-hqjl7\") pod \"heat-db-create-nwltr\" (UID: \"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df\") " pod="openstack/heat-db-create-nwltr" Sep 30 00:09:48 crc kubenswrapper[4922]: I0930 00:09:48.109681 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-648ffd58c7-rcgrg" event={"ID":"6c47bee4-1616-4b27-9980-79628af51f6c","Type":"ContainerStarted","Data":"bff36c8e505aa313d7137c22c91d1cbc6d358e52e3e49db179fd03fa635e7367"} Sep 30 00:09:48 crc kubenswrapper[4922]: I0930 00:09:48.110042 4922 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-648ffd58c7-rcgrg" event={"ID":"6c47bee4-1616-4b27-9980-79628af51f6c","Type":"ContainerStarted","Data":"3424d8e3e0a1ff4474ac8c12b8359ca80bd6e8b76b5fb77bcbb777bcc7ac30c6"} Sep 30 00:09:48 crc kubenswrapper[4922]: I0930 00:09:48.167904 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-nwltr" Sep 30 00:09:48 crc kubenswrapper[4922]: I0930 00:09:48.631080 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-nwltr"] Sep 30 00:09:48 crc kubenswrapper[4922]: W0930 00:09:48.637954 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb45e14c_f6a0_4fc6_b74d_c3f44cda04df.slice/crio-c9598e452dca26ca0686a7bfa38168b7e2c520f2064ef205f2b57f3c4d01eca0 WatchSource:0}: Error finding container c9598e452dca26ca0686a7bfa38168b7e2c520f2064ef205f2b57f3c4d01eca0: Status 404 returned error can't find the container with id c9598e452dca26ca0686a7bfa38168b7e2c520f2064ef205f2b57f3c4d01eca0 Sep 30 00:09:49 crc kubenswrapper[4922]: I0930 00:09:49.120551 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-648ffd58c7-rcgrg" event={"ID":"6c47bee4-1616-4b27-9980-79628af51f6c","Type":"ContainerStarted","Data":"7639a10958e46d62f646f9ad950cb45c660bc04719fa565ab8b4b2b4368018f8"} Sep 30 00:09:49 crc kubenswrapper[4922]: I0930 00:09:49.124022 4922 generic.go:334] "Generic (PLEG): container finished" podID="cb45e14c-f6a0-4fc6-b74d-c3f44cda04df" containerID="751be494e5d3b82cb56fc8c03584f37c3f1eca6ca4779eb4920f9779c20291a9" exitCode=0 Sep 30 00:09:49 crc kubenswrapper[4922]: I0930 00:09:49.124081 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-nwltr" event={"ID":"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df","Type":"ContainerDied","Data":"751be494e5d3b82cb56fc8c03584f37c3f1eca6ca4779eb4920f9779c20291a9"} Sep 30 00:09:49 crc kubenswrapper[4922]: I0930 00:09:49.124212 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-nwltr" event={"ID":"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df","Type":"ContainerStarted","Data":"c9598e452dca26ca0686a7bfa38168b7e2c520f2064ef205f2b57f3c4d01eca0"} Sep 30 00:09:49 crc kubenswrapper[4922]: I0930 00:09:49.147357 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-648ffd58c7-rcgrg" podStartSLOduration=3.147337148 podStartE2EDuration="3.147337148s" podCreationTimestamp="2025-09-30 00:09:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:09:49.141807801 +0000 UTC m=+6193.452096624" watchObservedRunningTime="2025-09-30 00:09:49.147337148 +0000 UTC m=+6193.457625961" Sep 30 00:09:50 crc kubenswrapper[4922]: I0930 00:09:50.590573 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-nwltr" Sep 30 00:09:50 crc kubenswrapper[4922]: I0930 00:09:50.606304 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqjl7\" (UniqueName: \"kubernetes.io/projected/cb45e14c-f6a0-4fc6-b74d-c3f44cda04df-kube-api-access-hqjl7\") pod \"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df\" (UID: \"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df\") " Sep 30 00:09:50 crc kubenswrapper[4922]: I0930 00:09:50.612342 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb45e14c-f6a0-4fc6-b74d-c3f44cda04df-kube-api-access-hqjl7" (OuterVolumeSpecName: "kube-api-access-hqjl7") pod "cb45e14c-f6a0-4fc6-b74d-c3f44cda04df" (UID: "cb45e14c-f6a0-4fc6-b74d-c3f44cda04df"). InnerVolumeSpecName "kube-api-access-hqjl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:09:50 crc kubenswrapper[4922]: I0930 00:09:50.709191 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqjl7\" (UniqueName: \"kubernetes.io/projected/cb45e14c-f6a0-4fc6-b74d-c3f44cda04df-kube-api-access-hqjl7\") on node \"crc\" DevicePath \"\"" Sep 30 00:09:51 crc kubenswrapper[4922]: I0930 00:09:51.148693 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-nwltr" event={"ID":"cb45e14c-f6a0-4fc6-b74d-c3f44cda04df","Type":"ContainerDied","Data":"c9598e452dca26ca0686a7bfa38168b7e2c520f2064ef205f2b57f3c4d01eca0"} Sep 30 00:09:51 crc kubenswrapper[4922]: I0930 00:09:51.148749 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9598e452dca26ca0686a7bfa38168b7e2c520f2064ef205f2b57f3c4d01eca0" Sep 30 00:09:51 crc kubenswrapper[4922]: I0930 00:09:51.148756 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-create-nwltr" Sep 30 00:09:54 crc kubenswrapper[4922]: I0930 00:09:54.463082 4922 scope.go:117] "RemoveContainer" containerID="e18bad55480bd38b4b32b8d27428b1eb1405b32af92c736675bd7d9d4405d4ff" Sep 30 00:09:54 crc kubenswrapper[4922]: I0930 00:09:54.521360 4922 scope.go:117] "RemoveContainer" containerID="215430d776de9256f863e9e8acdb8e99807a899057b0f9a192516d48a098fc45" Sep 30 00:09:54 crc kubenswrapper[4922]: I0930 00:09:54.554797 4922 scope.go:117] "RemoveContainer" containerID="989651c77172e61e6acb59a9eaea0610a044fa55e95d10cb2f672d3388ee6a32" Sep 30 00:09:54 crc kubenswrapper[4922]: I0930 00:09:54.635110 4922 scope.go:117] "RemoveContainer" containerID="f8ac858019877d34a1536d81c1e6549821bb23a4ef10e1d8a247ee2e5d25f3e2" Sep 30 00:09:54 crc kubenswrapper[4922]: I0930 00:09:54.673556 4922 scope.go:117] "RemoveContainer" containerID="46507cc70678c5b8aa6449d332718e8f6811a1f165793ef1473e29534f398fcc" Sep 30 00:09:56 crc kubenswrapper[4922]: I0930 00:09:56.786442 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:56 crc kubenswrapper[4922]: I0930 00:09:56.791935 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.055740 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-jvq8g"] Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.064405 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-jvq8g"] Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.091154 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-bbea-account-create-t7zn6"] Sep 30 00:09:58 crc kubenswrapper[4922]: E0930 00:09:58.091899 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb45e14c-f6a0-4fc6-b74d-c3f44cda04df" containerName="mariadb-database-create" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.091921 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb45e14c-f6a0-4fc6-b74d-c3f44cda04df" containerName="mariadb-database-create" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.092097 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb45e14c-f6a0-4fc6-b74d-c3f44cda04df" containerName="mariadb-database-create" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.092773 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-bbea-account-create-t7zn6" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.096144 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.105064 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-bbea-account-create-t7zn6"] Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.276498 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7n7w\" (UniqueName: \"kubernetes.io/projected/c61328e6-b9b4-420b-a24c-ea06d50fbcc6-kube-api-access-n7n7w\") pod \"heat-bbea-account-create-t7zn6\" (UID: \"c61328e6-b9b4-420b-a24c-ea06d50fbcc6\") " pod="openstack/heat-bbea-account-create-t7zn6" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.378214 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7n7w\" (UniqueName: \"kubernetes.io/projected/c61328e6-b9b4-420b-a24c-ea06d50fbcc6-kube-api-access-n7n7w\") pod \"heat-bbea-account-create-t7zn6\" (UID: \"c61328e6-b9b4-420b-a24c-ea06d50fbcc6\") " pod="openstack/heat-bbea-account-create-t7zn6" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.403322 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7n7w\" (UniqueName: \"kubernetes.io/projected/c61328e6-b9b4-420b-a24c-ea06d50fbcc6-kube-api-access-n7n7w\") pod \"heat-bbea-account-create-t7zn6\" (UID: \"c61328e6-b9b4-420b-a24c-ea06d50fbcc6\") " pod="openstack/heat-bbea-account-create-t7zn6" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.413955 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-bbea-account-create-t7zn6" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.433308 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd7ac5d1-b56d-451d-a60f-ef00bc34c49b" path="/var/lib/kubelet/pods/fd7ac5d1-b56d-451d-a60f-ef00bc34c49b/volumes" Sep 30 00:09:58 crc kubenswrapper[4922]: I0930 00:09:58.872266 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-bbea-account-create-t7zn6"] Sep 30 00:09:59 crc kubenswrapper[4922]: I0930 00:09:59.231381 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-bbea-account-create-t7zn6" event={"ID":"c61328e6-b9b4-420b-a24c-ea06d50fbcc6","Type":"ContainerStarted","Data":"145bb6bcc6e74bfcdeda5e53dfbdebd065c800a41bd45f9ec975c553998e1977"} Sep 30 00:09:59 crc kubenswrapper[4922]: I0930 00:09:59.233051 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-bbea-account-create-t7zn6" event={"ID":"c61328e6-b9b4-420b-a24c-ea06d50fbcc6","Type":"ContainerStarted","Data":"582eaa891aa1d811510558fc92ae87e5f3917f7318f80bd944fb6ca57afdaf53"} Sep 30 00:09:59 crc kubenswrapper[4922]: I0930 00:09:59.253721 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-bbea-account-create-t7zn6" podStartSLOduration=1.253705499 podStartE2EDuration="1.253705499s" podCreationTimestamp="2025-09-30 00:09:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:09:59.247704521 +0000 UTC m=+6203.557993344" watchObservedRunningTime="2025-09-30 00:09:59.253705499 +0000 UTC m=+6203.563994312" Sep 30 00:10:00 crc kubenswrapper[4922]: I0930 00:10:00.244509 4922 generic.go:334] "Generic (PLEG): container 
finished" podID="c61328e6-b9b4-420b-a24c-ea06d50fbcc6" containerID="145bb6bcc6e74bfcdeda5e53dfbdebd065c800a41bd45f9ec975c553998e1977" exitCode=0 Sep 30 00:10:00 crc kubenswrapper[4922]: I0930 00:10:00.244642 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-bbea-account-create-t7zn6" event={"ID":"c61328e6-b9b4-420b-a24c-ea06d50fbcc6","Type":"ContainerDied","Data":"145bb6bcc6e74bfcdeda5e53dfbdebd065c800a41bd45f9ec975c553998e1977"} Sep 30 00:10:00 crc kubenswrapper[4922]: I0930 00:10:00.424358 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:10:00 crc kubenswrapper[4922]: E0930 00:10:00.424779 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:10:01 crc kubenswrapper[4922]: I0930 00:10:01.665927 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-bbea-account-create-t7zn6" Sep 30 00:10:01 crc kubenswrapper[4922]: I0930 00:10:01.755958 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7n7w\" (UniqueName: \"kubernetes.io/projected/c61328e6-b9b4-420b-a24c-ea06d50fbcc6-kube-api-access-n7n7w\") pod \"c61328e6-b9b4-420b-a24c-ea06d50fbcc6\" (UID: \"c61328e6-b9b4-420b-a24c-ea06d50fbcc6\") " Sep 30 00:10:01 crc kubenswrapper[4922]: I0930 00:10:01.767599 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c61328e6-b9b4-420b-a24c-ea06d50fbcc6-kube-api-access-n7n7w" (OuterVolumeSpecName: "kube-api-access-n7n7w") pod "c61328e6-b9b4-420b-a24c-ea06d50fbcc6" (UID: "c61328e6-b9b4-420b-a24c-ea06d50fbcc6"). InnerVolumeSpecName "kube-api-access-n7n7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:10:01 crc kubenswrapper[4922]: I0930 00:10:01.858714 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7n7w\" (UniqueName: \"kubernetes.io/projected/c61328e6-b9b4-420b-a24c-ea06d50fbcc6-kube-api-access-n7n7w\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:02 crc kubenswrapper[4922]: I0930 00:10:02.278708 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-bbea-account-create-t7zn6" event={"ID":"c61328e6-b9b4-420b-a24c-ea06d50fbcc6","Type":"ContainerDied","Data":"582eaa891aa1d811510558fc92ae87e5f3917f7318f80bd944fb6ca57afdaf53"} Sep 30 00:10:02 crc kubenswrapper[4922]: I0930 00:10:02.278754 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="582eaa891aa1d811510558fc92ae87e5f3917f7318f80bd944fb6ca57afdaf53" Sep 30 00:10:02 crc kubenswrapper[4922]: I0930 00:10:02.278815 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-bbea-account-create-t7zn6" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.254119 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-qfs45"] Sep 30 00:10:03 crc kubenswrapper[4922]: E0930 00:10:03.254771 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c61328e6-b9b4-420b-a24c-ea06d50fbcc6" containerName="mariadb-account-create" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.254785 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c61328e6-b9b4-420b-a24c-ea06d50fbcc6" containerName="mariadb-account-create" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.254972 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c61328e6-b9b4-420b-a24c-ea06d50fbcc6" containerName="mariadb-account-create" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.256165 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.260927 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.261267 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-qjpjg" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.278018 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-qfs45"] Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.399088 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ftht\" (UniqueName: \"kubernetes.io/projected/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-kube-api-access-9ftht\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.399346 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-config-data\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.399492 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-combined-ca-bundle\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.501096 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ftht\" (UniqueName: \"kubernetes.io/projected/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-kube-api-access-9ftht\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.501750 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-config-data\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.501922 4922 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-combined-ca-bundle\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.516374 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-combined-ca-bundle\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.518146 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-config-data\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.531076 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ftht\" (UniqueName: \"kubernetes.io/projected/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-kube-api-access-9ftht\") pod \"heat-db-sync-qfs45\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:03 crc kubenswrapper[4922]: I0930 00:10:03.580990 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:04 crc kubenswrapper[4922]: I0930 00:10:04.083783 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-qfs45"] Sep 30 00:10:04 crc kubenswrapper[4922]: I0930 00:10:04.334004 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qfs45" event={"ID":"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d","Type":"ContainerStarted","Data":"51cfdf838b9dd94021ab2490174add50185add401542c98bfd1e2841637c312c"} Sep 30 00:10:06 crc kubenswrapper[4922]: I0930 00:10:06.788564 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-648ffd58c7-rcgrg" podUID="6c47bee4-1616-4b27-9980-79628af51f6c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.116:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.116:8080: connect: connection refused" Sep 30 00:10:08 crc kubenswrapper[4922]: I0930 00:10:08.042840 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-7e9b-account-create-hqhgw"] Sep 30 00:10:08 crc kubenswrapper[4922]: I0930 00:10:08.053221 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-7e9b-account-create-hqhgw"] Sep 30 00:10:08 crc kubenswrapper[4922]: I0930 00:10:08.438822 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc7f5af7-c789-4911-bf7a-92e6f2aaf128" path="/var/lib/kubelet/pods/dc7f5af7-c789-4911-bf7a-92e6f2aaf128/volumes" Sep 30 00:10:12 crc kubenswrapper[4922]: I0930 00:10:12.422080 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:10:12 crc kubenswrapper[4922]: E0930 00:10:12.423458 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:10:14 crc kubenswrapper[4922]: I0930 00:10:14.029559 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-jcbvf"] Sep 30 00:10:14 crc kubenswrapper[4922]: I0930 00:10:14.042228 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-jcbvf"] Sep 30 00:10:14 crc kubenswrapper[4922]: I0930 00:10:14.434702 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8def651b-de0d-4c92-899a-f0844c10106b" path="/var/lib/kubelet/pods/8def651b-de0d-4c92-899a-f0844c10106b/volumes" Sep 30 00:10:14 crc kubenswrapper[4922]: I0930 00:10:14.437331 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qfs45" event={"ID":"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d","Type":"ContainerStarted","Data":"15628ca873c68d3ef55f35a47c181ddf28c20d1f8703afdd45801b78bbd60d2c"} Sep 30 00:10:14 crc kubenswrapper[4922]: I0930 00:10:14.458644 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-qfs45" podStartSLOduration=2.123126101 podStartE2EDuration="11.458626802s" podCreationTimestamp="2025-09-30 00:10:03 +0000 UTC" firstStartedPulling="2025-09-30 00:10:04.09415053 +0000 UTC m=+6208.404439383" lastFinishedPulling="2025-09-30 00:10:13.429651261 +0000 UTC m=+6217.739940084" observedRunningTime="2025-09-30 00:10:14.457468574 +0000 UTC m=+6218.767757397" watchObservedRunningTime="2025-09-30 00:10:14.458626802 +0000 UTC m=+6218.768915615" Sep 30 00:10:16 crc kubenswrapper[4922]: I0930 00:10:16.471855 4922 generic.go:334] "Generic (PLEG): container finished" podID="6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" containerID="15628ca873c68d3ef55f35a47c181ddf28c20d1f8703afdd45801b78bbd60d2c" exitCode=0 Sep 30 00:10:16 crc kubenswrapper[4922]: I0930 00:10:16.471976 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qfs45" event={"ID":"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d","Type":"ContainerDied","Data":"15628ca873c68d3ef55f35a47c181ddf28c20d1f8703afdd45801b78bbd60d2c"} Sep 30 00:10:17 crc kubenswrapper[4922]: I0930 00:10:17.901148 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.059457 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-config-data\") pod \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.059754 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-combined-ca-bundle\") pod \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.059941 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ftht\" (UniqueName: \"kubernetes.io/projected/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-kube-api-access-9ftht\") pod \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\" (UID: \"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d\") " Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.064549 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-kube-api-access-9ftht" (OuterVolumeSpecName: "kube-api-access-9ftht") pod "6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" (UID: "6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d"). InnerVolumeSpecName "kube-api-access-9ftht". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.091556 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" (UID: "6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.162933 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.162988 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ftht\" (UniqueName: \"kubernetes.io/projected/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-kube-api-access-9ftht\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.166725 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-config-data" (OuterVolumeSpecName: "config-data") pod "6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" (UID: "6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.265260 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.517749 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-qfs45" event={"ID":"6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d","Type":"ContainerDied","Data":"51cfdf838b9dd94021ab2490174add50185add401542c98bfd1e2841637c312c"} Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.517807 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-qfs45" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.517836 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51cfdf838b9dd94021ab2490174add50185add401542c98bfd1e2841637c312c" Sep 30 00:10:18 crc kubenswrapper[4922]: I0930 00:10:18.782725 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:10:19 crc kubenswrapper[4922]: I0930 00:10:19.960246 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-5d4f95dff7-7mnm2"] Sep 30 00:10:19 crc kubenswrapper[4922]: E0930 00:10:19.961347 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" containerName="heat-db-sync" Sep 30 00:10:19 crc kubenswrapper[4922]: I0930 00:10:19.961366 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" containerName="heat-db-sync" Sep 30 00:10:19 crc kubenswrapper[4922]: I0930 00:10:19.961654 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" containerName="heat-db-sync" Sep 30 00:10:19 crc kubenswrapper[4922]: I0930 00:10:19.962529 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:19 crc kubenswrapper[4922]: I0930 00:10:19.969223 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Sep 30 00:10:19 crc kubenswrapper[4922]: I0930 00:10:19.969482 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-qjpjg" Sep 30 00:10:19 crc kubenswrapper[4922]: I0930 00:10:19.969555 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:19.999376 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-5d4f95dff7-7mnm2"] Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.027557 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-config-data\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.027669 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-combined-ca-bundle\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.028814 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-config-data-custom\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.030194 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rpwn\" (UniqueName: \"kubernetes.io/projected/847d3ab6-ffef-437b-b3d3-9a073fba0deb-kube-api-access-9rpwn\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.079025 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-9cc9d7476-w7smm"] Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.080790 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.085730 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.099780 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-9cc9d7476-w7smm"] Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.110554 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-776d95b874-qp56h"] Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.112438 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.117866 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.124963 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-776d95b874-qp56h"] Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.131796 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-config-data\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.131855 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-config-data-custom\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.131910 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4bxc\" (UniqueName: \"kubernetes.io/projected/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-kube-api-access-d4bxc\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.131947 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-config-data-custom\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.131975 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-config-data\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.131992 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-combined-ca-bundle\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.132020 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-config-data-custom\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.132053 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rpwn\" (UniqueName: \"kubernetes.io/projected/847d3ab6-ffef-437b-b3d3-9a073fba0deb-kube-api-access-9rpwn\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: 
\"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.132089 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-combined-ca-bundle\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.132618 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwg6r\" (UniqueName: \"kubernetes.io/projected/8f8a75ca-eefb-4908-88db-d35951495a62-kube-api-access-xwg6r\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.132672 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-config-data\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.132691 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-combined-ca-bundle\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.138551 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-config-data\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.144245 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-combined-ca-bundle\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.147024 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/847d3ab6-ffef-437b-b3d3-9a073fba0deb-config-data-custom\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.158083 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rpwn\" (UniqueName: \"kubernetes.io/projected/847d3ab6-ffef-437b-b3d3-9a073fba0deb-kube-api-access-9rpwn\") pod \"heat-engine-5d4f95dff7-7mnm2\" (UID: \"847d3ab6-ffef-437b-b3d3-9a073fba0deb\") " pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234316 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-config-data\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: 
\"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234623 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-combined-ca-bundle\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234656 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-config-data-custom\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234718 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-combined-ca-bundle\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234739 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwg6r\" (UniqueName: \"kubernetes.io/projected/8f8a75ca-eefb-4908-88db-d35951495a62-kube-api-access-xwg6r\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234797 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-config-data\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234826 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-config-data-custom\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.234874 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4bxc\" (UniqueName: \"kubernetes.io/projected/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-kube-api-access-d4bxc\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.239061 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-combined-ca-bundle\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.240032 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-config-data-custom\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " 
pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.243240 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f8a75ca-eefb-4908-88db-d35951495a62-config-data\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.243974 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-combined-ca-bundle\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.244130 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-config-data-custom\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.254944 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-config-data\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.256199 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4bxc\" (UniqueName: \"kubernetes.io/projected/fc184a80-6988-4fa7-9ca7-b58db26d4fe3-kube-api-access-d4bxc\") pod \"heat-api-776d95b874-qp56h\" (UID: \"fc184a80-6988-4fa7-9ca7-b58db26d4fe3\") " pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.258955 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwg6r\" (UniqueName: \"kubernetes.io/projected/8f8a75ca-eefb-4908-88db-d35951495a62-kube-api-access-xwg6r\") pod \"heat-cfnapi-9cc9d7476-w7smm\" (UID: \"8f8a75ca-eefb-4908-88db-d35951495a62\") " pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.294696 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.417962 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.432039 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.792428 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-5d4f95dff7-7mnm2"] Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.885719 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-648ffd58c7-rcgrg" Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.963128 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-749d8576cc-xm7k7"] Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.963683 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-749d8576cc-xm7k7" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon-log" containerID="cri-o://9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f" gracePeriod=30 Sep 30 00:10:20 crc kubenswrapper[4922]: I0930 00:10:20.963741 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-749d8576cc-xm7k7" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" containerID="cri-o://f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0" gracePeriod=30 Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.022091 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-9cc9d7476-w7smm"] Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.036106 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-776d95b874-qp56h"] Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.553827 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-5d4f95dff7-7mnm2" event={"ID":"847d3ab6-ffef-437b-b3d3-9a073fba0deb","Type":"ContainerStarted","Data":"6cc9a370307f4e0d20d0b0453f22e54e59dccafa2fea4fbbf6e712d7fdd62f8e"} Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.554173 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-5d4f95dff7-7mnm2" event={"ID":"847d3ab6-ffef-437b-b3d3-9a073fba0deb","Type":"ContainerStarted","Data":"b507a2bb180f2ffd19270c0d08d28d0fa90362de5d1a7cbd05fee396b8b9e4cb"} Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.554383 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.556050 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-776d95b874-qp56h" event={"ID":"fc184a80-6988-4fa7-9ca7-b58db26d4fe3","Type":"ContainerStarted","Data":"b49f1b57332cf70491d2fd3dcc83b3f40ab9c56136ac26dab0c94c647f782b42"} Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.557815 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-9cc9d7476-w7smm" event={"ID":"8f8a75ca-eefb-4908-88db-d35951495a62","Type":"ContainerStarted","Data":"2f286e214b95c0517496e3faf01c4cab59d6fe971ebc2ebc47e53ec6526ebc4b"} Sep 30 00:10:21 crc kubenswrapper[4922]: I0930 00:10:21.575724 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-5d4f95dff7-7mnm2" podStartSLOduration=2.575694655 podStartE2EDuration="2.575694655s" podCreationTimestamp="2025-09-30 00:10:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:10:21.572890366 +0000 UTC m=+6225.883179179" watchObservedRunningTime="2025-09-30 
00:10:21.575694655 +0000 UTC m=+6225.885983508" Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.602694 4922 generic.go:334] "Generic (PLEG): container finished" podID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerID="f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0" exitCode=0 Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.602734 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749d8576cc-xm7k7" event={"ID":"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5","Type":"ContainerDied","Data":"f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0"} Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.607736 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-9cc9d7476-w7smm" event={"ID":"8f8a75ca-eefb-4908-88db-d35951495a62","Type":"ContainerStarted","Data":"549922d9ed27497683c9b827b21316d50beedc93a8c64ee878d52bb12f57f053"} Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.607882 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.611884 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-776d95b874-qp56h" event={"ID":"fc184a80-6988-4fa7-9ca7-b58db26d4fe3","Type":"ContainerStarted","Data":"2b1bd7cfb324a00d638acbd1ca8e50a7cd199fcb97c1f04910cfb542ba7d1c67"} Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.612196 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.650684 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-776d95b874-qp56h" podStartSLOduration=2.2561221160000002 podStartE2EDuration="4.650661471s" podCreationTimestamp="2025-09-30 00:10:20 +0000 UTC" firstStartedPulling="2025-09-30 00:10:21.018578072 +0000 UTC m=+6225.328866885" lastFinishedPulling="2025-09-30 00:10:23.413117427 +0000 UTC m=+6227.723406240" observedRunningTime="2025-09-30 00:10:24.649288957 +0000 UTC m=+6228.959577810" watchObservedRunningTime="2025-09-30 00:10:24.650661471 +0000 UTC m=+6228.960950284" Sep 30 00:10:24 crc kubenswrapper[4922]: I0930 00:10:24.653131 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-9cc9d7476-w7smm" podStartSLOduration=2.260929283 podStartE2EDuration="4.653121761s" podCreationTimestamp="2025-09-30 00:10:20 +0000 UTC" firstStartedPulling="2025-09-30 00:10:21.024046977 +0000 UTC m=+6225.334335780" lastFinishedPulling="2025-09-30 00:10:23.416239405 +0000 UTC m=+6227.726528258" observedRunningTime="2025-09-30 00:10:24.635168758 +0000 UTC m=+6228.945457581" watchObservedRunningTime="2025-09-30 00:10:24.653121761 +0000 UTC m=+6228.963410574" Sep 30 00:10:25 crc kubenswrapper[4922]: I0930 00:10:25.422898 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:10:25 crc kubenswrapper[4922]: E0930 00:10:25.423423 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:10:28 crc 
kubenswrapper[4922]: I0930 00:10:28.721264 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-749d8576cc-xm7k7" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Sep 30 00:10:31 crc kubenswrapper[4922]: I0930 00:10:31.954187 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-776d95b874-qp56h" Sep 30 00:10:32 crc kubenswrapper[4922]: I0930 00:10:32.851036 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-9cc9d7476-w7smm" Sep 30 00:10:37 crc kubenswrapper[4922]: I0930 00:10:37.422012 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:10:37 crc kubenswrapper[4922]: E0930 00:10:37.422871 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:10:38 crc kubenswrapper[4922]: I0930 00:10:38.721122 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-749d8576cc-xm7k7" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Sep 30 00:10:40 crc kubenswrapper[4922]: I0930 00:10:40.342672 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-5d4f95dff7-7mnm2" Sep 30 00:10:48 crc kubenswrapper[4922]: I0930 00:10:48.422261 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:10:48 crc kubenswrapper[4922]: E0930 00:10:48.422866 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:10:48 crc kubenswrapper[4922]: I0930 00:10:48.722659 4922 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-749d8576cc-xm7k7" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.112:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.112:8080: connect: connection refused" Sep 30 00:10:48 crc kubenswrapper[4922]: I0930 00:10:48.722816 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.468152 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.596268 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxxfr\" (UniqueName: \"kubernetes.io/projected/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-kube-api-access-bxxfr\") pod \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.596968 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-logs\") pod \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.597103 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-scripts\") pod \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.597245 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-horizon-secret-key\") pod \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.597507 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-config-data\") pod \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\" (UID: \"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5\") " Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.597641 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-logs" (OuterVolumeSpecName: "logs") pod "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" (UID: "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.598472 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-logs\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.606087 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-kube-api-access-bxxfr" (OuterVolumeSpecName: "kube-api-access-bxxfr") pod "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" (UID: "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5"). InnerVolumeSpecName "kube-api-access-bxxfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.608255 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" (UID: "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.633094 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-scripts" (OuterVolumeSpecName: "scripts") pod "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" (UID: "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.666892 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-config-data" (OuterVolumeSpecName: "config-data") pod "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" (UID: "0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.700834 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.700866 4922 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.700884 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.700898 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxxfr\" (UniqueName: \"kubernetes.io/projected/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5-kube-api-access-bxxfr\") on node \"crc\" DevicePath \"\"" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.929573 4922 generic.go:334] "Generic (PLEG): container finished" podID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerID="9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f" exitCode=137 Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.929664 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-749d8576cc-xm7k7" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.929669 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749d8576cc-xm7k7" event={"ID":"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5","Type":"ContainerDied","Data":"9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f"} Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.930214 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749d8576cc-xm7k7" event={"ID":"0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5","Type":"ContainerDied","Data":"536cbddf0e6dfaebd7fe9f3810dad8a244c20239b0d74ae32f369ff25235c321"} Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.930255 4922 scope.go:117] "RemoveContainer" containerID="f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0" Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.977911 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-749d8576cc-xm7k7"] Sep 30 00:10:51 crc kubenswrapper[4922]: I0930 00:10:51.989189 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-749d8576cc-xm7k7"] Sep 30 00:10:52 crc kubenswrapper[4922]: I0930 00:10:52.116991 4922 scope.go:117] "RemoveContainer" containerID="9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f" Sep 30 00:10:52 crc kubenswrapper[4922]: I0930 00:10:52.161346 4922 scope.go:117] "RemoveContainer" containerID="f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0" Sep 30 00:10:52 crc kubenswrapper[4922]: E0930 00:10:52.164476 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0\": container with ID starting with f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0 not found: ID does not exist" containerID="f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0" Sep 30 00:10:52 crc kubenswrapper[4922]: I0930 00:10:52.164596 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0"} err="failed to get container status \"f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0\": rpc error: code = NotFound desc = could not find container \"f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0\": container with ID starting with f04c224240a125627fd36173e50b7d24f1a20c88a3ef49299f8b5e3a50243fd0 not found: ID does not exist" Sep 30 00:10:52 crc kubenswrapper[4922]: I0930 00:10:52.164621 4922 scope.go:117] "RemoveContainer" containerID="9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f" Sep 30 00:10:52 crc kubenswrapper[4922]: E0930 00:10:52.165548 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f\": container with ID starting with 9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f not found: ID does not exist" containerID="9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f" Sep 30 00:10:52 crc kubenswrapper[4922]: I0930 00:10:52.165581 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f"} err="failed to get container status 
\"9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f\": rpc error: code = NotFound desc = could not find container \"9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f\": container with ID starting with 9f7d91fdc8e93b7f5a3f7a800ab03d55fb810946494fef06646796240aa85e5f not found: ID does not exist" Sep 30 00:10:52 crc kubenswrapper[4922]: I0930 00:10:52.443855 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" path="/var/lib/kubelet/pods/0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5/volumes" Sep 30 00:10:54 crc kubenswrapper[4922]: I0930 00:10:54.987730 4922 scope.go:117] "RemoveContainer" containerID="3c7820562bc640373de4301ca6d8794484dbdb87f9e588dd4593ee41dd86ad28" Sep 30 00:10:55 crc kubenswrapper[4922]: I0930 00:10:55.014283 4922 scope.go:117] "RemoveContainer" containerID="f7c76236e3b2fe37d36785d46cc8bfdd5c3a705a43cc0ba924809d220bb606bd" Sep 30 00:10:55 crc kubenswrapper[4922]: I0930 00:10:55.095207 4922 scope.go:117] "RemoveContainer" containerID="82d75ed960de3c94560416552d7c60a592c0ade2811cd0906e3a8c81f75f56af" Sep 30 00:11:03 crc kubenswrapper[4922]: I0930 00:11:03.422714 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:11:03 crc kubenswrapper[4922]: E0930 00:11:03.424217 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.073496 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-ndjw5"] Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.086720 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-wx9vk"] Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.099158 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-ndjw5"] Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.108113 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-6jbpz"] Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.116998 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-6jbpz"] Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.125052 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-wx9vk"] Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.421801 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:11:14 crc kubenswrapper[4922]: E0930 00:11:14.422169 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.449678 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="197a91a3-1cbf-40c2-bda0-84a389e78366" path="/var/lib/kubelet/pods/197a91a3-1cbf-40c2-bda0-84a389e78366/volumes" Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.450830 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="639a2c8b-1225-4c52-8da9-fd1e6d24f38b" path="/var/lib/kubelet/pods/639a2c8b-1225-4c52-8da9-fd1e6d24f38b/volumes" Sep 30 00:11:14 crc kubenswrapper[4922]: I0930 00:11:14.452430 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f158e45-b59f-4f85-95f2-4c9c5a410984" path="/var/lib/kubelet/pods/6f158e45-b59f-4f85-95f2-4c9c5a410984/volumes" Sep 30 00:11:25 crc kubenswrapper[4922]: I0930 00:11:25.039383 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-a486-account-create-m77qk"] Sep 30 00:11:25 crc kubenswrapper[4922]: I0930 00:11:25.046904 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-534a-account-create-mntx5"] Sep 30 00:11:25 crc kubenswrapper[4922]: I0930 00:11:25.060288 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-534a-account-create-mntx5"] Sep 30 00:11:25 crc kubenswrapper[4922]: I0930 00:11:25.074459 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-a486-account-create-m77qk"] Sep 30 00:11:25 crc kubenswrapper[4922]: I0930 00:11:25.082357 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-5829-account-create-86h9c"] Sep 30 00:11:25 crc kubenswrapper[4922]: I0930 00:11:25.088766 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-5829-account-create-86h9c"] Sep 30 00:11:26 crc kubenswrapper[4922]: I0930 00:11:26.444312 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25d16583-e8ae-480d-8814-35e0051537f7" path="/var/lib/kubelet/pods/25d16583-e8ae-480d-8814-35e0051537f7/volumes" Sep 30 00:11:26 crc kubenswrapper[4922]: I0930 00:11:26.460667 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9112b115-b748-4278-8a10-01ccbadf6c77" path="/var/lib/kubelet/pods/9112b115-b748-4278-8a10-01ccbadf6c77/volumes" Sep 30 00:11:26 crc kubenswrapper[4922]: I0930 00:11:26.461838 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1d4f130-a86e-4fca-aa7b-79013b3b18a0" path="/var/lib/kubelet/pods/d1d4f130-a86e-4fca-aa7b-79013b3b18a0/volumes" Sep 30 00:11:27 crc kubenswrapper[4922]: I0930 00:11:27.422594 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:11:27 crc kubenswrapper[4922]: E0930 00:11:27.423039 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:11:34 crc kubenswrapper[4922]: I0930 00:11:34.054451 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nt96d"] Sep 30 00:11:34 crc kubenswrapper[4922]: I0930 00:11:34.069314 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-nt96d"] Sep 30 00:11:34 crc kubenswrapper[4922]: I0930 00:11:34.439489 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4ea179b5-caac-40f8-87d1-a207140c8752" path="/var/lib/kubelet/pods/4ea179b5-caac-40f8-87d1-a207140c8752/volumes" Sep 30 00:11:38 crc kubenswrapper[4922]: I0930 00:11:38.424120 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:11:39 crc kubenswrapper[4922]: I0930 00:11:39.483102 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"a6f6de9491a4e56e81d9e945f5a62e49838ba5953cd0f64477adcdc4f24b42ea"} Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.229949 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc"] Sep 30 00:11:45 crc kubenswrapper[4922]: E0930 00:11:45.230992 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.231007 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" Sep 30 00:11:45 crc kubenswrapper[4922]: E0930 00:11:45.231027 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon-log" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.231035 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon-log" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.231318 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon-log" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.231340 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b6bfd79-b7f3-4858-a095-4cbf2e6db3f5" containerName="horizon" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.235247 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.237288 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.249309 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc"] Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.312379 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.312593 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flgs5\" (UniqueName: \"kubernetes.io/projected/537ab108-9132-4e26-b55a-821c042d8c0d-kube-api-access-flgs5\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.312662 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.414488 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flgs5\" (UniqueName: \"kubernetes.io/projected/537ab108-9132-4e26-b55a-821c042d8c0d-kube-api-access-flgs5\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.414622 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.414666 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.415191 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.415213 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.432515 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flgs5\" (UniqueName: \"kubernetes.io/projected/537ab108-9132-4e26-b55a-821c042d8c0d-kube-api-access-flgs5\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:45 crc kubenswrapper[4922]: I0930 00:11:45.562520 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:46 crc kubenswrapper[4922]: I0930 00:11:46.181634 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc"] Sep 30 00:11:46 crc kubenswrapper[4922]: W0930 00:11:46.190934 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod537ab108_9132_4e26_b55a_821c042d8c0d.slice/crio-f9806f542aa2e96502ac3ed9b8bc96bf708b99d3cfb37842bd8cfc1d31f730c8 WatchSource:0}: Error finding container f9806f542aa2e96502ac3ed9b8bc96bf708b99d3cfb37842bd8cfc1d31f730c8: Status 404 returned error can't find the container with id f9806f542aa2e96502ac3ed9b8bc96bf708b99d3cfb37842bd8cfc1d31f730c8 Sep 30 00:11:46 crc kubenswrapper[4922]: I0930 00:11:46.553242 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" event={"ID":"537ab108-9132-4e26-b55a-821c042d8c0d","Type":"ContainerStarted","Data":"a372f8439d11b09e931d2fbd5e456b9d90cb85d0bc225b9eab82b702fa521dd0"} Sep 30 00:11:46 crc kubenswrapper[4922]: I0930 00:11:46.553752 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" event={"ID":"537ab108-9132-4e26-b55a-821c042d8c0d","Type":"ContainerStarted","Data":"f9806f542aa2e96502ac3ed9b8bc96bf708b99d3cfb37842bd8cfc1d31f730c8"} Sep 30 00:11:47 crc kubenswrapper[4922]: I0930 00:11:47.030548 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmp5d"] Sep 30 00:11:47 crc kubenswrapper[4922]: I0930 00:11:47.041641 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-bmp5d"] Sep 30 00:11:47 crc kubenswrapper[4922]: I0930 00:11:47.566587 4922 generic.go:334] "Generic (PLEG): container finished" podID="537ab108-9132-4e26-b55a-821c042d8c0d" containerID="a372f8439d11b09e931d2fbd5e456b9d90cb85d0bc225b9eab82b702fa521dd0" exitCode=0 Sep 30 00:11:47 crc kubenswrapper[4922]: I0930 
00:11:47.566645 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" event={"ID":"537ab108-9132-4e26-b55a-821c042d8c0d","Type":"ContainerDied","Data":"a372f8439d11b09e931d2fbd5e456b9d90cb85d0bc225b9eab82b702fa521dd0"} Sep 30 00:11:48 crc kubenswrapper[4922]: I0930 00:11:48.031709 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-vl9l2"] Sep 30 00:11:48 crc kubenswrapper[4922]: I0930 00:11:48.044706 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-vl9l2"] Sep 30 00:11:48 crc kubenswrapper[4922]: I0930 00:11:48.442425 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9539613f-19af-430f-bcbc-4dd019ba71bf" path="/var/lib/kubelet/pods/9539613f-19af-430f-bcbc-4dd019ba71bf/volumes" Sep 30 00:11:48 crc kubenswrapper[4922]: I0930 00:11:48.445453 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cbedef-764b-476b-a191-00443706e40d" path="/var/lib/kubelet/pods/b6cbedef-764b-476b-a191-00443706e40d/volumes" Sep 30 00:11:50 crc kubenswrapper[4922]: I0930 00:11:50.609940 4922 generic.go:334] "Generic (PLEG): container finished" podID="537ab108-9132-4e26-b55a-821c042d8c0d" containerID="f5a10fc9636283f2c43462cbec45097a6f6f5f9f321d3ee196ee14cf2e23f64c" exitCode=0 Sep 30 00:11:50 crc kubenswrapper[4922]: I0930 00:11:50.610224 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" event={"ID":"537ab108-9132-4e26-b55a-821c042d8c0d","Type":"ContainerDied","Data":"f5a10fc9636283f2c43462cbec45097a6f6f5f9f321d3ee196ee14cf2e23f64c"} Sep 30 00:11:51 crc kubenswrapper[4922]: I0930 00:11:51.655606 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" event={"ID":"537ab108-9132-4e26-b55a-821c042d8c0d","Type":"ContainerStarted","Data":"9a10982a9c3e9bbb07b659330e443c2983485d82d86bef098e2884f2e45c801f"} Sep 30 00:11:51 crc kubenswrapper[4922]: I0930 00:11:51.698163 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" podStartSLOduration=4.113259947 podStartE2EDuration="6.698145785s" podCreationTimestamp="2025-09-30 00:11:45 +0000 UTC" firstStartedPulling="2025-09-30 00:11:47.568889524 +0000 UTC m=+6311.879178347" lastFinishedPulling="2025-09-30 00:11:50.153775372 +0000 UTC m=+6314.464064185" observedRunningTime="2025-09-30 00:11:51.690550737 +0000 UTC m=+6316.000839550" watchObservedRunningTime="2025-09-30 00:11:51.698145785 +0000 UTC m=+6316.008434598" Sep 30 00:11:52 crc kubenswrapper[4922]: I0930 00:11:52.668272 4922 generic.go:334] "Generic (PLEG): container finished" podID="537ab108-9132-4e26-b55a-821c042d8c0d" containerID="9a10982a9c3e9bbb07b659330e443c2983485d82d86bef098e2884f2e45c801f" exitCode=0 Sep 30 00:11:52 crc kubenswrapper[4922]: I0930 00:11:52.668358 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" event={"ID":"537ab108-9132-4e26-b55a-821c042d8c0d","Type":"ContainerDied","Data":"9a10982a9c3e9bbb07b659330e443c2983485d82d86bef098e2884f2e45c801f"} Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.103233 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.215528 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-util\") pod \"537ab108-9132-4e26-b55a-821c042d8c0d\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.215674 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-bundle\") pod \"537ab108-9132-4e26-b55a-821c042d8c0d\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.215707 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flgs5\" (UniqueName: \"kubernetes.io/projected/537ab108-9132-4e26-b55a-821c042d8c0d-kube-api-access-flgs5\") pod \"537ab108-9132-4e26-b55a-821c042d8c0d\" (UID: \"537ab108-9132-4e26-b55a-821c042d8c0d\") " Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.218982 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-bundle" (OuterVolumeSpecName: "bundle") pod "537ab108-9132-4e26-b55a-821c042d8c0d" (UID: "537ab108-9132-4e26-b55a-821c042d8c0d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.223718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/537ab108-9132-4e26-b55a-821c042d8c0d-kube-api-access-flgs5" (OuterVolumeSpecName: "kube-api-access-flgs5") pod "537ab108-9132-4e26-b55a-821c042d8c0d" (UID: "537ab108-9132-4e26-b55a-821c042d8c0d"). InnerVolumeSpecName "kube-api-access-flgs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.235874 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-util" (OuterVolumeSpecName: "util") pod "537ab108-9132-4e26-b55a-821c042d8c0d" (UID: "537ab108-9132-4e26-b55a-821c042d8c0d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.318194 4922 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-util\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.318229 4922 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/537ab108-9132-4e26-b55a-821c042d8c0d-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.318241 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flgs5\" (UniqueName: \"kubernetes.io/projected/537ab108-9132-4e26-b55a-821c042d8c0d-kube-api-access-flgs5\") on node \"crc\" DevicePath \"\"" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.691941 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" event={"ID":"537ab108-9132-4e26-b55a-821c042d8c0d","Type":"ContainerDied","Data":"f9806f542aa2e96502ac3ed9b8bc96bf708b99d3cfb37842bd8cfc1d31f730c8"} Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.691989 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9806f542aa2e96502ac3ed9b8bc96bf708b99d3cfb37842bd8cfc1d31f730c8" Sep 30 00:11:54 crc kubenswrapper[4922]: I0930 00:11:54.691997 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.219083 4922 scope.go:117] "RemoveContainer" containerID="42d181379f893cab83346a862727ae49b5b1c680a1b745d334542fe7e72dcd19" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.262957 4922 scope.go:117] "RemoveContainer" containerID="4c5fa365c41225c547f67b2374dc1355fcbceacd408cde37d1f2951e28a55885" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.320196 4922 scope.go:117] "RemoveContainer" containerID="c90e8e4b231775cd1c41b07b43a81edf1e083525c0c2b33155ba4a42d68d0c97" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.372256 4922 scope.go:117] "RemoveContainer" containerID="f29183db1b8e1156b5b763de87dfc3c18da749229f3a705e89a995b5dc99feb6" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.435652 4922 scope.go:117] "RemoveContainer" containerID="742fc4c33e22984c7512b89da2bcab454e1e64bf4a7ec3206a6c14b680c243dd" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.471156 4922 scope.go:117] "RemoveContainer" containerID="7813a14f6188ffd7ae61c55494fe3e2e1b2c902514958b3274892cce1d0e009f" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.530450 4922 scope.go:117] "RemoveContainer" containerID="9a7e3e16a8f22de1aca49d40241029e58abe48de5e00bd748ffa2d2e8dcb6333" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.552332 4922 scope.go:117] "RemoveContainer" containerID="ad5d44c5e60f154a395c690d22daeffb0459ce8e058e9b20360b64975c9077b6" Sep 30 00:11:55 crc kubenswrapper[4922]: I0930 00:11:55.575369 4922 scope.go:117] "RemoveContainer" containerID="109a6286f8aedb4b5bcbf1b2b428cba1e3d9c15de19055ca0934a454b14a9783" Sep 30 00:12:07 crc kubenswrapper[4922]: I0930 00:12:07.070291 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-57nb4"] Sep 30 00:12:07 crc kubenswrapper[4922]: I0930 00:12:07.080410 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-cell-mapping-57nb4"] Sep 30 00:12:08 crc kubenswrapper[4922]: I0930 00:12:08.433774 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcef0ab0-3df5-4835-b938-9c49e92ae366" path="/var/lib/kubelet/pods/fcef0ab0-3df5-4835-b938-9c49e92ae366/volumes" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.868593 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr"] Sep 30 00:12:11 crc kubenswrapper[4922]: E0930 00:12:11.869706 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537ab108-9132-4e26-b55a-821c042d8c0d" containerName="util" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.869725 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="537ab108-9132-4e26-b55a-821c042d8c0d" containerName="util" Sep 30 00:12:11 crc kubenswrapper[4922]: E0930 00:12:11.869749 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537ab108-9132-4e26-b55a-821c042d8c0d" containerName="pull" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.869759 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="537ab108-9132-4e26-b55a-821c042d8c0d" containerName="pull" Sep 30 00:12:11 crc kubenswrapper[4922]: E0930 00:12:11.869790 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="537ab108-9132-4e26-b55a-821c042d8c0d" containerName="extract" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.869798 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="537ab108-9132-4e26-b55a-821c042d8c0d" containerName="extract" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.870058 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="537ab108-9132-4e26-b55a-821c042d8c0d" containerName="extract" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.870833 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.873495 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.874012 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.874366 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-jnv25" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.897753 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr"] Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.929194 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nksfp\" (UniqueName: \"kubernetes.io/projected/5ed9b0d2-2c6a-4526-8a40-c4361f9020f9-kube-api-access-nksfp\") pod \"obo-prometheus-operator-7c8cf85677-sdjrr\" (UID: \"5ed9b0d2-2c6a-4526-8a40-c4361f9020f9\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.987644 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v"] Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.993003 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.995604 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-hgw74" Sep 30 00:12:11 crc kubenswrapper[4922]: I0930 00:12:11.995782 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.005266 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v"] Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.015736 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht"] Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.017998 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.030468 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht"] Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.031092 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nksfp\" (UniqueName: \"kubernetes.io/projected/5ed9b0d2-2c6a-4526-8a40-c4361f9020f9-kube-api-access-nksfp\") pod \"obo-prometheus-operator-7c8cf85677-sdjrr\" (UID: \"5ed9b0d2-2c6a-4526-8a40-c4361f9020f9\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.060852 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nksfp\" (UniqueName: \"kubernetes.io/projected/5ed9b0d2-2c6a-4526-8a40-c4361f9020f9-kube-api-access-nksfp\") pod \"obo-prometheus-operator-7c8cf85677-sdjrr\" (UID: \"5ed9b0d2-2c6a-4526-8a40-c4361f9020f9\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.127254 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-tdgkh"] Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.128613 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.132095 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.132805 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42795bdf-428c-4e42-a1bd-c410f3984a18-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht\" (UID: \"42795bdf-428c-4e42-a1bd-c410f3984a18\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.132849 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42795bdf-428c-4e42-a1bd-c410f3984a18-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht\" (UID: \"42795bdf-428c-4e42-a1bd-c410f3984a18\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.132899 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cb5b40d-54dd-4610-a982-3490f932ac7e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v\" (UID: \"1cb5b40d-54dd-4610-a982-3490f932ac7e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.132951 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cb5b40d-54dd-4610-a982-3490f932ac7e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v\" (UID: \"1cb5b40d-54dd-4610-a982-3490f932ac7e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.137583 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-j45lb" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.153763 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-tdgkh"] Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.205132 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.239793 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/3dfa4ab9-ec96-4c28-bf02-9395095442e2-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-tdgkh\" (UID: \"3dfa4ab9-ec96-4c28-bf02-9395095442e2\") " pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.239837 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cb5b40d-54dd-4610-a982-3490f932ac7e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v\" (UID: \"1cb5b40d-54dd-4610-a982-3490f932ac7e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.239882 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cb5b40d-54dd-4610-a982-3490f932ac7e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v\" (UID: \"1cb5b40d-54dd-4610-a982-3490f932ac7e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.239986 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw976\" (UniqueName: \"kubernetes.io/projected/3dfa4ab9-ec96-4c28-bf02-9395095442e2-kube-api-access-kw976\") pod \"observability-operator-cc5f78dfc-tdgkh\" (UID: \"3dfa4ab9-ec96-4c28-bf02-9395095442e2\") " pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.240013 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42795bdf-428c-4e42-a1bd-c410f3984a18-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht\" (UID: \"42795bdf-428c-4e42-a1bd-c410f3984a18\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.240035 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42795bdf-428c-4e42-a1bd-c410f3984a18-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht\" (UID: \"42795bdf-428c-4e42-a1bd-c410f3984a18\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.252894 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42795bdf-428c-4e42-a1bd-c410f3984a18-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht\" (UID: \"42795bdf-428c-4e42-a1bd-c410f3984a18\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.258915 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cb5b40d-54dd-4610-a982-3490f932ac7e-webhook-cert\") pod 
\"obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v\" (UID: \"1cb5b40d-54dd-4610-a982-3490f932ac7e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.270381 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cb5b40d-54dd-4610-a982-3490f932ac7e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v\" (UID: \"1cb5b40d-54dd-4610-a982-3490f932ac7e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.279915 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42795bdf-428c-4e42-a1bd-c410f3984a18-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht\" (UID: \"42795bdf-428c-4e42-a1bd-c410f3984a18\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.313814 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.335264 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.342888 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/3dfa4ab9-ec96-4c28-bf02-9395095442e2-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-tdgkh\" (UID: \"3dfa4ab9-ec96-4c28-bf02-9395095442e2\") " pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.343058 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw976\" (UniqueName: \"kubernetes.io/projected/3dfa4ab9-ec96-4c28-bf02-9395095442e2-kube-api-access-kw976\") pod \"observability-operator-cc5f78dfc-tdgkh\" (UID: \"3dfa4ab9-ec96-4c28-bf02-9395095442e2\") " pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.355175 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/3dfa4ab9-ec96-4c28-bf02-9395095442e2-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-tdgkh\" (UID: \"3dfa4ab9-ec96-4c28-bf02-9395095442e2\") " pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.386111 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw976\" (UniqueName: \"kubernetes.io/projected/3dfa4ab9-ec96-4c28-bf02-9395095442e2-kube-api-access-kw976\") pod \"observability-operator-cc5f78dfc-tdgkh\" (UID: \"3dfa4ab9-ec96-4c28-bf02-9395095442e2\") " pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.456809 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.473916 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-wlsdp"] Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.475078 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-wlsdp"] Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.475155 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.502666 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-k6r78" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.559004 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpgbv\" (UniqueName: \"kubernetes.io/projected/3e46edec-6c01-4236-a0ca-3125fb7126db-kube-api-access-bpgbv\") pod \"perses-operator-54bc95c9fb-wlsdp\" (UID: \"3e46edec-6c01-4236-a0ca-3125fb7126db\") " pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.559042 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3e46edec-6c01-4236-a0ca-3125fb7126db-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-wlsdp\" (UID: \"3e46edec-6c01-4236-a0ca-3125fb7126db\") " pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.661254 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpgbv\" (UniqueName: \"kubernetes.io/projected/3e46edec-6c01-4236-a0ca-3125fb7126db-kube-api-access-bpgbv\") pod \"perses-operator-54bc95c9fb-wlsdp\" (UID: \"3e46edec-6c01-4236-a0ca-3125fb7126db\") " pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.661549 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3e46edec-6c01-4236-a0ca-3125fb7126db-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-wlsdp\" (UID: \"3e46edec-6c01-4236-a0ca-3125fb7126db\") " pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.662919 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/3e46edec-6c01-4236-a0ca-3125fb7126db-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-wlsdp\" (UID: \"3e46edec-6c01-4236-a0ca-3125fb7126db\") " pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.686384 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpgbv\" (UniqueName: \"kubernetes.io/projected/3e46edec-6c01-4236-a0ca-3125fb7126db-kube-api-access-bpgbv\") pod \"perses-operator-54bc95c9fb-wlsdp\" (UID: \"3e46edec-6c01-4236-a0ca-3125fb7126db\") " pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:12 crc kubenswrapper[4922]: I0930 00:12:12.836994 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.043401 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v"] Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.067239 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht"] Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.086607 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr"] Sep 30 00:12:13 crc kubenswrapper[4922]: W0930 00:12:13.098608 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ed9b0d2_2c6a_4526_8a40_c4361f9020f9.slice/crio-9974a1724221e197b135ddeab54e8d3ab173dde1a9e0dffaba132cb82e99fa68 WatchSource:0}: Error finding container 9974a1724221e197b135ddeab54e8d3ab173dde1a9e0dffaba132cb82e99fa68: Status 404 returned error can't find the container with id 9974a1724221e197b135ddeab54e8d3ab173dde1a9e0dffaba132cb82e99fa68 Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.242258 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-tdgkh"] Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.410564 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-wlsdp"] Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.950526 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" event={"ID":"3dfa4ab9-ec96-4c28-bf02-9395095442e2","Type":"ContainerStarted","Data":"53f39ed7cf85300c38f30288c36e101c4168f7ba889e0d2675a3cab006671d06"} Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.953306 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" event={"ID":"5ed9b0d2-2c6a-4526-8a40-c4361f9020f9","Type":"ContainerStarted","Data":"9974a1724221e197b135ddeab54e8d3ab173dde1a9e0dffaba132cb82e99fa68"} Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.956321 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" event={"ID":"1cb5b40d-54dd-4610-a982-3490f932ac7e","Type":"ContainerStarted","Data":"038524bb8b7cd6b6b6ebc6f27b43aba5092bdbe82d2fdfc86a37d4f80328eff4"} Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.957476 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" event={"ID":"3e46edec-6c01-4236-a0ca-3125fb7126db","Type":"ContainerStarted","Data":"37e8af279bdc7527928cdc9dce7d511e360a25afbb2f6ccf5ae0e5ad3c5af362"} Sep 30 00:12:13 crc kubenswrapper[4922]: I0930 00:12:13.958694 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" event={"ID":"42795bdf-428c-4e42-a1bd-c410f3984a18","Type":"ContainerStarted","Data":"a25ffb9157f4694be33ff26affd61e9d4f1389b99fd9fa3b38b86c85fdc45286"} Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.076519 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" 
event={"ID":"42795bdf-428c-4e42-a1bd-c410f3984a18","Type":"ContainerStarted","Data":"9c96dfcdaff9663a53daf4aca61a5b5f1875e98a892a2166d192ae7e45f3993b"} Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.078710 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" event={"ID":"3dfa4ab9-ec96-4c28-bf02-9395095442e2","Type":"ContainerStarted","Data":"a8a103802987817bc502935210df497352fee353230cb64093c87af48ab8d465"} Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.078855 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.080276 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" event={"ID":"5ed9b0d2-2c6a-4526-8a40-c4361f9020f9","Type":"ContainerStarted","Data":"00d817c21efa46010e16a5cebbb6fb94edcc3c038d7812c1dde6bad595e0f299"} Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.080913 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.082997 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" event={"ID":"1cb5b40d-54dd-4610-a982-3490f932ac7e","Type":"ContainerStarted","Data":"efad8c8ba90eb0c028dbd0c5053352a08278c125699b49ca11c75cf4b5f7f71b"} Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.084811 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" event={"ID":"3e46edec-6c01-4236-a0ca-3125fb7126db","Type":"ContainerStarted","Data":"63ed076c5f08bda1b20fa094430d2ea8844b28e0beb4b7a88a2aad6b2ca0b03c"} Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.084941 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.095267 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht" podStartSLOduration=2.766923283 podStartE2EDuration="13.095252589s" podCreationTimestamp="2025-09-30 00:12:11 +0000 UTC" firstStartedPulling="2025-09-30 00:12:13.086578266 +0000 UTC m=+6337.396867079" lastFinishedPulling="2025-09-30 00:12:23.414907572 +0000 UTC m=+6347.725196385" observedRunningTime="2025-09-30 00:12:24.094913941 +0000 UTC m=+6348.405202774" watchObservedRunningTime="2025-09-30 00:12:24.095252589 +0000 UTC m=+6348.405541402" Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.125144 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-tdgkh" podStartSLOduration=1.833886938 podStartE2EDuration="12.125122167s" podCreationTimestamp="2025-09-30 00:12:12 +0000 UTC" firstStartedPulling="2025-09-30 00:12:13.243723738 +0000 UTC m=+6337.554012551" lastFinishedPulling="2025-09-30 00:12:23.534958967 +0000 UTC m=+6347.845247780" observedRunningTime="2025-09-30 00:12:24.11956404 +0000 UTC m=+6348.429852853" watchObservedRunningTime="2025-09-30 00:12:24.125122167 +0000 UTC m=+6348.435410980" Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.195926 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" podStartSLOduration=2.199655975 podStartE2EDuration="12.195906766s" podCreationTimestamp="2025-09-30 00:12:12 +0000 UTC" firstStartedPulling="2025-09-30 00:12:13.419179483 +0000 UTC m=+6337.729468296" lastFinishedPulling="2025-09-30 00:12:23.415430274 +0000 UTC m=+6347.725719087" observedRunningTime="2025-09-30 00:12:24.185022917 +0000 UTC m=+6348.495311730" watchObservedRunningTime="2025-09-30 00:12:24.195906766 +0000 UTC m=+6348.506195579" Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.197256 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v" podStartSLOduration=2.846881579 podStartE2EDuration="13.197247299s" podCreationTimestamp="2025-09-30 00:12:11 +0000 UTC" firstStartedPulling="2025-09-30 00:12:13.063715191 +0000 UTC m=+6337.374003994" lastFinishedPulling="2025-09-30 00:12:23.414080901 +0000 UTC m=+6347.724369714" observedRunningTime="2025-09-30 00:12:24.152699059 +0000 UTC m=+6348.462987882" watchObservedRunningTime="2025-09-30 00:12:24.197247299 +0000 UTC m=+6348.507536112" Sep 30 00:12:24 crc kubenswrapper[4922]: I0930 00:12:24.218249 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-sdjrr" podStartSLOduration=2.848675624 podStartE2EDuration="13.218232948s" podCreationTimestamp="2025-09-30 00:12:11 +0000 UTC" firstStartedPulling="2025-09-30 00:12:13.101155796 +0000 UTC m=+6337.411444599" lastFinishedPulling="2025-09-30 00:12:23.47071311 +0000 UTC m=+6347.781001923" observedRunningTime="2025-09-30 00:12:24.216965926 +0000 UTC m=+6348.527254739" watchObservedRunningTime="2025-09-30 00:12:24.218232948 +0000 UTC m=+6348.528521761" Sep 30 00:12:32 crc kubenswrapper[4922]: I0930 00:12:32.840111 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-wlsdp" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.651399 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.663868 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.696007 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 00:12:35 crc kubenswrapper[4922]: E0930 00:12:35.696459 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" containerName="openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.696472 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" containerName="openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.696689 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" containerName="openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.697574 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.718079 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.770006 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65bab339-0cd2-43f1-8387-dedc132cb9f3-openstack-config\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.770068 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b475m\" (UniqueName: \"kubernetes.io/projected/65bab339-0cd2-43f1-8387-dedc132cb9f3-kube-api-access-b475m\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.770101 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65bab339-0cd2-43f1-8387-dedc132cb9f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.871578 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65bab339-0cd2-43f1-8387-dedc132cb9f3-openstack-config\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.871642 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b475m\" (UniqueName: \"kubernetes.io/projected/65bab339-0cd2-43f1-8387-dedc132cb9f3-kube-api-access-b475m\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.871677 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65bab339-0cd2-43f1-8387-dedc132cb9f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.872696 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/65bab339-0cd2-43f1-8387-dedc132cb9f3-openstack-config\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.880752 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/65bab339-0cd2-43f1-8387-dedc132cb9f3-openstack-config-secret\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.882282 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.883577 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.894426 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-q5cdg" Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.949360 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:12:35 crc kubenswrapper[4922]: I0930 00:12:35.974869 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hzj7\" (UniqueName: \"kubernetes.io/projected/f18060ca-aa4f-4b42-883b-f238c4784e37-kube-api-access-8hzj7\") pod \"kube-state-metrics-0\" (UID: \"f18060ca-aa4f-4b42-883b-f238c4784e37\") " pod="openstack/kube-state-metrics-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.007187 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b475m\" (UniqueName: \"kubernetes.io/projected/65bab339-0cd2-43f1-8387-dedc132cb9f3-kube-api-access-b475m\") pod \"openstackclient\" (UID: \"65bab339-0cd2-43f1-8387-dedc132cb9f3\") " pod="openstack/openstackclient" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.037037 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.077587 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hzj7\" (UniqueName: \"kubernetes.io/projected/f18060ca-aa4f-4b42-883b-f238c4784e37-kube-api-access-8hzj7\") pod \"kube-state-metrics-0\" (UID: \"f18060ca-aa4f-4b42-883b-f238c4784e37\") " pod="openstack/kube-state-metrics-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.115486 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hzj7\" (UniqueName: \"kubernetes.io/projected/f18060ca-aa4f-4b42-883b-f238c4784e37-kube-api-access-8hzj7\") pod \"kube-state-metrics-0\" (UID: \"f18060ca-aa4f-4b42-883b-f238c4784e37\") " pod="openstack/kube-state-metrics-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.254893 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" containerName="openstackclient" containerID="cri-o://9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418" gracePeriod=2 Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.262825 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="40533406-f1f6-4c7a-9377-d8c338936f67" podUID="65bab339-0cd2-43f1-8387-dedc132cb9f3" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.304870 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.773810 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"] Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.792468 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.804731 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.805147 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-vgq9w" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.805485 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.805627 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.837829 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.910926 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.910997 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.911040 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.911070 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.911138 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:36 crc kubenswrapper[4922]: I0930 00:12:36.911178 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5557\" (UniqueName: \"kubernetes.io/projected/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-kube-api-access-s5557\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.012603 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.012947 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.012989 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.013013 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.013057 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.013087 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5557\" (UniqueName: \"kubernetes.io/projected/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-kube-api-access-s5557\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.024932 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.030368 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.035406 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.044980 4922 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.067684 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.071228 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5557\" (UniqueName: \"kubernetes.io/projected/6931ec30-85ca-4bdc-824d-4c2a72fa36f8-kube-api-access-s5557\") pod \"alertmanager-metric-storage-0\" (UID: \"6931ec30-85ca-4bdc-824d-4c2a72fa36f8\") " pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.140278 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.256192 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.275561 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.283910 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.284049 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-l58l7" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.284127 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.289816 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.298821 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.299054 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.299376 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 00:12:37 crc kubenswrapper[4922]: W0930 00:12:37.303128 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65bab339_0cd2_43f1_8387_dedc132cb9f3.slice/crio-0cac66342378f035b1842cf8dee36b93e0465db0850ba715cebe9c500ca5d4d9 WatchSource:0}: Error finding container 0cac66342378f035b1842cf8dee36b93e0465db0850ba715cebe9c500ca5d4d9: Status 404 returned error can't find the container with id 0cac66342378f035b1842cf8dee36b93e0465db0850ba715cebe9c500ca5d4d9 Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328375 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/fd2b9bb1-3eaf-4999-b558-f4778bae160b-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328452 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xhqr\" (UniqueName: \"kubernetes.io/projected/fd2b9bb1-3eaf-4999-b558-f4778bae160b-kube-api-access-8xhqr\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328519 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/fd2b9bb1-3eaf-4999-b558-f4778bae160b-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328613 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328641 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328667 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328701 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-config\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.328737 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/fd2b9bb1-3eaf-4999-b558-f4778bae160b-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.355179 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430282 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-config\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430342 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/fd2b9bb1-3eaf-4999-b558-f4778bae160b-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430372 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/fd2b9bb1-3eaf-4999-b558-f4778bae160b-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430484 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xhqr\" (UniqueName: \"kubernetes.io/projected/fd2b9bb1-3eaf-4999-b558-f4778bae160b-kube-api-access-8xhqr\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430511 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/fd2b9bb1-3eaf-4999-b558-f4778bae160b-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430603 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430631 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.430661 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.433715 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.441187 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: 
\"kubernetes.io/empty-dir/fd2b9bb1-3eaf-4999-b558-f4778bae160b-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.441820 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.442469 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/fd2b9bb1-3eaf-4999-b558-f4778bae160b-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.442914 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-config\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.450050 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/fd2b9bb1-3eaf-4999-b558-f4778bae160b-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.450534 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/fd2b9bb1-3eaf-4999-b558-f4778bae160b-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.455915 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xhqr\" (UniqueName: \"kubernetes.io/projected/fd2b9bb1-3eaf-4999-b558-f4778bae160b-kube-api-access-8xhqr\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.485523 4922 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.485558 4922 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7890d6adf21c9c454bacf746a5e64142187890a339f11fc0d414af7a5b113465/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.648066 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-1dc109e6-cbf3-41ef-8cf6-6db3cce7f72d\") pod \"prometheus-metric-storage-0\" (UID: \"fd2b9bb1-3eaf-4999-b558-f4778bae160b\") " pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.698889 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 00:12:37 crc kubenswrapper[4922]: I0930 00:12:37.946685 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Sep 30 00:12:37 crc kubenswrapper[4922]: W0930 00:12:37.982973 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6931ec30_85ca_4bdc_824d_4c2a72fa36f8.slice/crio-987fd8fed4dcee416be0dc775f6440470952b877407e2cb7f402d9553cd7ac9c WatchSource:0}: Error finding container 987fd8fed4dcee416be0dc775f6440470952b877407e2cb7f402d9553cd7ac9c: Status 404 returned error can't find the container with id 987fd8fed4dcee416be0dc775f6440470952b877407e2cb7f402d9553cd7ac9c Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.246740 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.299247 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"fd2b9bb1-3eaf-4999-b558-f4778bae160b","Type":"ContainerStarted","Data":"a1404a23a6fde7a812ab4f6545ea26c18cb1a17655f0f97f30a347adf589b991"} Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.303286 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"65bab339-0cd2-43f1-8387-dedc132cb9f3","Type":"ContainerStarted","Data":"bcba51cdb389f5d5a9b156436a03277e1b5655e78ca8d6dbeb4b91b9978cf970"} Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.303327 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"65bab339-0cd2-43f1-8387-dedc132cb9f3","Type":"ContainerStarted","Data":"0cac66342378f035b1842cf8dee36b93e0465db0850ba715cebe9c500ca5d4d9"} Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.306052 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6931ec30-85ca-4bdc-824d-4c2a72fa36f8","Type":"ContainerStarted","Data":"987fd8fed4dcee416be0dc775f6440470952b877407e2cb7f402d9553cd7ac9c"} Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.308458 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" 
event={"ID":"f18060ca-aa4f-4b42-883b-f238c4784e37","Type":"ContainerStarted","Data":"e7d088c78a06e83439ab9f9e691b9056a2a984b66b93300b14d774a9b0db8099"} Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.308490 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f18060ca-aa4f-4b42-883b-f238c4784e37","Type":"ContainerStarted","Data":"d6e406e76936ee3fd015fb38f18233a49f8ee205d2426c107d1329b64b53177d"} Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.308599 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.344905 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.344878848 podStartE2EDuration="3.344878848s" podCreationTimestamp="2025-09-30 00:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:12:38.32591773 +0000 UTC m=+6362.636206543" watchObservedRunningTime="2025-09-30 00:12:38.344878848 +0000 UTC m=+6362.655167661" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.355786 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.85314354 podStartE2EDuration="3.355766337s" podCreationTimestamp="2025-09-30 00:12:35 +0000 UTC" firstStartedPulling="2025-09-30 00:12:37.479262334 +0000 UTC m=+6361.789551147" lastFinishedPulling="2025-09-30 00:12:37.981885141 +0000 UTC m=+6362.292173944" observedRunningTime="2025-09-30 00:12:38.341066654 +0000 UTC m=+6362.651355467" watchObservedRunningTime="2025-09-30 00:12:38.355766337 +0000 UTC m=+6362.666055150" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.548589 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.552609 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="40533406-f1f6-4c7a-9377-d8c338936f67" podUID="65bab339-0cd2-43f1-8387-dedc132cb9f3" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.692104 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5b4n\" (UniqueName: \"kubernetes.io/projected/40533406-f1f6-4c7a-9377-d8c338936f67-kube-api-access-h5b4n\") pod \"40533406-f1f6-4c7a-9377-d8c338936f67\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.692234 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config\") pod \"40533406-f1f6-4c7a-9377-d8c338936f67\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.692310 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config-secret\") pod \"40533406-f1f6-4c7a-9377-d8c338936f67\" (UID: \"40533406-f1f6-4c7a-9377-d8c338936f67\") " Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.697596 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40533406-f1f6-4c7a-9377-d8c338936f67-kube-api-access-h5b4n" (OuterVolumeSpecName: "kube-api-access-h5b4n") pod "40533406-f1f6-4c7a-9377-d8c338936f67" (UID: "40533406-f1f6-4c7a-9377-d8c338936f67"). InnerVolumeSpecName "kube-api-access-h5b4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.718294 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "40533406-f1f6-4c7a-9377-d8c338936f67" (UID: "40533406-f1f6-4c7a-9377-d8c338936f67"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.760350 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "40533406-f1f6-4c7a-9377-d8c338936f67" (UID: "40533406-f1f6-4c7a-9377-d8c338936f67"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.796489 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5b4n\" (UniqueName: \"kubernetes.io/projected/40533406-f1f6-4c7a-9377-d8c338936f67-kube-api-access-h5b4n\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.796532 4922 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:38 crc kubenswrapper[4922]: I0930 00:12:38.796545 4922 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/40533406-f1f6-4c7a-9377-d8c338936f67-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 30 00:12:39 crc kubenswrapper[4922]: I0930 00:12:39.320105 4922 generic.go:334] "Generic (PLEG): container finished" podID="40533406-f1f6-4c7a-9377-d8c338936f67" containerID="9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418" exitCode=137 Sep 30 00:12:39 crc kubenswrapper[4922]: I0930 00:12:39.320177 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 30 00:12:39 crc kubenswrapper[4922]: I0930 00:12:39.320190 4922 scope.go:117] "RemoveContainer" containerID="9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418" Sep 30 00:12:39 crc kubenswrapper[4922]: I0930 00:12:39.324411 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="40533406-f1f6-4c7a-9377-d8c338936f67" podUID="65bab339-0cd2-43f1-8387-dedc132cb9f3" Sep 30 00:12:39 crc kubenswrapper[4922]: I0930 00:12:39.336626 4922 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="40533406-f1f6-4c7a-9377-d8c338936f67" podUID="65bab339-0cd2-43f1-8387-dedc132cb9f3" Sep 30 00:12:39 crc kubenswrapper[4922]: I0930 00:12:39.366731 4922 scope.go:117] "RemoveContainer" containerID="9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418" Sep 30 00:12:39 crc kubenswrapper[4922]: E0930 00:12:39.367098 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418\": container with ID starting with 9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418 not found: ID does not exist" containerID="9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418" Sep 30 00:12:39 crc kubenswrapper[4922]: I0930 00:12:39.367141 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418"} err="failed to get container status \"9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418\": rpc error: code = NotFound desc = could not find container \"9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418\": container with ID starting with 9fc96f7f4c7fa539a30299bc3dad01a0adb6d32f50ecb25a33dd1b936e5ac418 not found: ID does not exist" Sep 30 00:12:40 crc kubenswrapper[4922]: I0930 00:12:40.432994 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40533406-f1f6-4c7a-9377-d8c338936f67" path="/var/lib/kubelet/pods/40533406-f1f6-4c7a-9377-d8c338936f67/volumes" Sep 
30 00:12:45 crc kubenswrapper[4922]: I0930 00:12:45.389002 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6931ec30-85ca-4bdc-824d-4c2a72fa36f8","Type":"ContainerStarted","Data":"84352fd368a52c99d767ad92891f3244da022d2b358506960da1f5298e377086"} Sep 30 00:12:45 crc kubenswrapper[4922]: I0930 00:12:45.391822 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"fd2b9bb1-3eaf-4999-b558-f4778bae160b","Type":"ContainerStarted","Data":"7eee6c2ae880b86c9c7ffcc447eccc3e11166f7ea105ab6951a628c627a60bae"} Sep 30 00:12:46 crc kubenswrapper[4922]: I0930 00:12:46.319685 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 00:12:51 crc kubenswrapper[4922]: I0930 00:12:51.039939 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-dmdzw"] Sep 30 00:12:51 crc kubenswrapper[4922]: I0930 00:12:51.057639 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-dmdzw"] Sep 30 00:12:52 crc kubenswrapper[4922]: I0930 00:12:52.445294 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b19a4ef3-308d-48fd-9f7d-e932f861145c" path="/var/lib/kubelet/pods/b19a4ef3-308d-48fd-9f7d-e932f861145c/volumes" Sep 30 00:12:53 crc kubenswrapper[4922]: I0930 00:12:53.494187 4922 generic.go:334] "Generic (PLEG): container finished" podID="fd2b9bb1-3eaf-4999-b558-f4778bae160b" containerID="7eee6c2ae880b86c9c7ffcc447eccc3e11166f7ea105ab6951a628c627a60bae" exitCode=0 Sep 30 00:12:53 crc kubenswrapper[4922]: I0930 00:12:53.494286 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"fd2b9bb1-3eaf-4999-b558-f4778bae160b","Type":"ContainerDied","Data":"7eee6c2ae880b86c9c7ffcc447eccc3e11166f7ea105ab6951a628c627a60bae"} Sep 30 00:12:55 crc kubenswrapper[4922]: I0930 00:12:55.535451 4922 generic.go:334] "Generic (PLEG): container finished" podID="6931ec30-85ca-4bdc-824d-4c2a72fa36f8" containerID="84352fd368a52c99d767ad92891f3244da022d2b358506960da1f5298e377086" exitCode=0 Sep 30 00:12:55 crc kubenswrapper[4922]: I0930 00:12:55.535503 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6931ec30-85ca-4bdc-824d-4c2a72fa36f8","Type":"ContainerDied","Data":"84352fd368a52c99d767ad92891f3244da022d2b358506960da1f5298e377086"} Sep 30 00:12:55 crc kubenswrapper[4922]: I0930 00:12:55.807224 4922 scope.go:117] "RemoveContainer" containerID="17aefab3d1aa428534857f44817f58d500b24c209055816efcaeaefcdf5a91a1" Sep 30 00:12:55 crc kubenswrapper[4922]: I0930 00:12:55.845843 4922 scope.go:117] "RemoveContainer" containerID="1f67d70f27862ed5c75ca62d1d3cdf78eb529fc104dee0d549963a7efbac033b" Sep 30 00:12:55 crc kubenswrapper[4922]: I0930 00:12:55.952094 4922 scope.go:117] "RemoveContainer" containerID="ab4e10ed4824b61df7e5a6de8b6814e343adfd0e0cf6a9e15ec0223bf28e4884" Sep 30 00:13:00 crc kubenswrapper[4922]: I0930 00:13:00.588382 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6931ec30-85ca-4bdc-824d-4c2a72fa36f8","Type":"ContainerStarted","Data":"7ca40c147d520bfa4d7cc6bcf4c1ac29e8efaddfa781170b125df9c99a4655dc"} Sep 30 00:13:00 crc kubenswrapper[4922]: I0930 00:13:00.591855 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"fd2b9bb1-3eaf-4999-b558-f4778bae160b","Type":"ContainerStarted","Data":"b8564ae3a56fc40182a965fc6dddb5a42c7cc372844df80e3c42329b58cf07d6"} Sep 30 00:13:01 crc kubenswrapper[4922]: I0930 00:13:01.046851 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-1bb4-account-create-whbth"] Sep 30 00:13:01 crc kubenswrapper[4922]: I0930 00:13:01.060922 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-1bb4-account-create-whbth"] Sep 30 00:13:02 crc kubenswrapper[4922]: I0930 00:13:02.436607 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d8783f8-224b-447a-92aa-83c7c39a1b09" path="/var/lib/kubelet/pods/6d8783f8-224b-447a-92aa-83c7c39a1b09/volumes" Sep 30 00:13:04 crc kubenswrapper[4922]: I0930 00:13:04.655830 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"6931ec30-85ca-4bdc-824d-4c2a72fa36f8","Type":"ContainerStarted","Data":"6965ad1286904d96ff273296be1a0535ad9888612e7669a9832f4eb181bb56c8"} Sep 30 00:13:04 crc kubenswrapper[4922]: I0930 00:13:04.656252 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Sep 30 00:13:04 crc kubenswrapper[4922]: I0930 00:13:04.661271 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Sep 30 00:13:04 crc kubenswrapper[4922]: I0930 00:13:04.664051 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"fd2b9bb1-3eaf-4999-b558-f4778bae160b","Type":"ContainerStarted","Data":"dd3168865339efe0892a90316e58fd766d7ff7078be9e50aa0bca20fdf7c89c9"} Sep 30 00:13:04 crc kubenswrapper[4922]: I0930 00:13:04.700028 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=7.094959946 podStartE2EDuration="28.700006419s" podCreationTimestamp="2025-09-30 00:12:36 +0000 UTC" firstStartedPulling="2025-09-30 00:12:37.989847707 +0000 UTC m=+6362.300136520" lastFinishedPulling="2025-09-30 00:12:59.59489418 +0000 UTC m=+6383.905182993" observedRunningTime="2025-09-30 00:13:04.686799392 +0000 UTC m=+6388.997088215" watchObservedRunningTime="2025-09-30 00:13:04.700006419 +0000 UTC m=+6389.010295242" Sep 30 00:13:07 crc kubenswrapper[4922]: I0930 00:13:07.700803 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"fd2b9bb1-3eaf-4999-b558-f4778bae160b","Type":"ContainerStarted","Data":"2e6dfc9888bbb271e95f11f56949b27085a40f27a33b20753b23a1687ab9d1aa"} Sep 30 00:13:07 crc kubenswrapper[4922]: I0930 00:13:07.726665 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=2.602837081 podStartE2EDuration="31.72664472s" podCreationTimestamp="2025-09-30 00:12:36 +0000 UTC" firstStartedPulling="2025-09-30 00:12:38.261897918 +0000 UTC m=+6362.572186731" lastFinishedPulling="2025-09-30 00:13:07.385705557 +0000 UTC m=+6391.695994370" observedRunningTime="2025-09-30 00:13:07.724963368 +0000 UTC m=+6392.035252191" watchObservedRunningTime="2025-09-30 00:13:07.72664472 +0000 UTC m=+6392.036933533" Sep 30 00:13:09 crc kubenswrapper[4922]: I0930 00:13:09.043678 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-sp7xk"] Sep 30 00:13:09 crc kubenswrapper[4922]: I0930 00:13:09.064040 4922 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/cinder-db-sync-sp7xk"] Sep 30 00:13:10 crc kubenswrapper[4922]: I0930 00:13:10.444926 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83" path="/var/lib/kubelet/pods/eaf8102b-90d7-4cb7-a1ff-d49dc6e6ab83/volumes" Sep 30 00:13:12 crc kubenswrapper[4922]: I0930 00:13:12.700001 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.056450 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.059729 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.062630 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.063188 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.067135 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.199014 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-config-data\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.199340 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsvlr\" (UniqueName: \"kubernetes.io/projected/36af16c1-0755-43ab-b45b-3c123dee0fb7-kube-api-access-gsvlr\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.199489 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.199639 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-run-httpd\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.199821 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-scripts\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.200152 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: 
I0930 00:13:13.200273 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-log-httpd\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302194 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-run-httpd\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302242 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-scripts\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302315 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302354 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-log-httpd\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302423 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-config-data\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302489 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsvlr\" (UniqueName: \"kubernetes.io/projected/36af16c1-0755-43ab-b45b-3c123dee0fb7-kube-api-access-gsvlr\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302518 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.302866 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-run-httpd\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.303239 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-log-httpd\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.316975 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.317179 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.317955 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-scripts\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.318205 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-config-data\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.320974 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsvlr\" (UniqueName: \"kubernetes.io/projected/36af16c1-0755-43ab-b45b-3c123dee0fb7-kube-api-access-gsvlr\") pod \"ceilometer-0\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.380308 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:13:13 crc kubenswrapper[4922]: W0930 00:13:13.875578 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36af16c1_0755_43ab_b45b_3c123dee0fb7.slice/crio-7256b427863065a56f61b9f1f901075b85ce755d2585a93079c51f572188ef81 WatchSource:0}: Error finding container 7256b427863065a56f61b9f1f901075b85ce755d2585a93079c51f572188ef81: Status 404 returned error can't find the container with id 7256b427863065a56f61b9f1f901075b85ce755d2585a93079c51f572188ef81 Sep 30 00:13:13 crc kubenswrapper[4922]: I0930 00:13:13.878048 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:14 crc kubenswrapper[4922]: I0930 00:13:14.778232 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerStarted","Data":"d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734"} Sep 30 00:13:14 crc kubenswrapper[4922]: I0930 00:13:14.778588 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerStarted","Data":"7256b427863065a56f61b9f1f901075b85ce755d2585a93079c51f572188ef81"} Sep 30 00:13:15 crc kubenswrapper[4922]: I0930 00:13:15.790241 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerStarted","Data":"4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a"} Sep 30 00:13:16 crc kubenswrapper[4922]: I0930 00:13:16.806034 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerStarted","Data":"5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3"} Sep 30 00:13:18 crc kubenswrapper[4922]: I0930 00:13:18.826774 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerStarted","Data":"a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142"} Sep 30 00:13:19 crc kubenswrapper[4922]: I0930 00:13:19.841830 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:13:19 crc kubenswrapper[4922]: I0930 00:13:19.896011 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.532686545 podStartE2EDuration="6.895982437s" podCreationTimestamp="2025-09-30 00:13:13 +0000 UTC" firstStartedPulling="2025-09-30 00:13:13.879895503 +0000 UTC m=+6398.190184326" lastFinishedPulling="2025-09-30 00:13:18.243191405 +0000 UTC m=+6402.553480218" observedRunningTime="2025-09-30 00:13:19.877623733 +0000 UTC m=+6404.187912586" watchObservedRunningTime="2025-09-30 00:13:19.895982437 +0000 UTC m=+6404.206271290" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.314278 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-5fdvz"] Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.316631 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-5fdvz" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.325419 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-5fdvz"] Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.414368 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh4xr\" (UniqueName: \"kubernetes.io/projected/bbc9a231-7e7e-43bd-ad0c-66a5642601c5-kube-api-access-gh4xr\") pod \"aodh-db-create-5fdvz\" (UID: \"bbc9a231-7e7e-43bd-ad0c-66a5642601c5\") " pod="openstack/aodh-db-create-5fdvz" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.517080 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh4xr\" (UniqueName: \"kubernetes.io/projected/bbc9a231-7e7e-43bd-ad0c-66a5642601c5-kube-api-access-gh4xr\") pod \"aodh-db-create-5fdvz\" (UID: \"bbc9a231-7e7e-43bd-ad0c-66a5642601c5\") " pod="openstack/aodh-db-create-5fdvz" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.546060 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh4xr\" (UniqueName: \"kubernetes.io/projected/bbc9a231-7e7e-43bd-ad0c-66a5642601c5-kube-api-access-gh4xr\") pod \"aodh-db-create-5fdvz\" (UID: \"bbc9a231-7e7e-43bd-ad0c-66a5642601c5\") " pod="openstack/aodh-db-create-5fdvz" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.642625 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-5fdvz" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.700532 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.703881 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 00:13:22 crc kubenswrapper[4922]: I0930 00:13:22.907677 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 00:13:23 crc kubenswrapper[4922]: I0930 00:13:23.338160 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-5fdvz"] Sep 30 00:13:23 crc kubenswrapper[4922]: I0930 00:13:23.908360 4922 generic.go:334] "Generic (PLEG): container finished" podID="bbc9a231-7e7e-43bd-ad0c-66a5642601c5" containerID="f40caf44cde83805e7d5d16e343d01a16c0f7627bd698b3b85610bd6d4942095" exitCode=0 Sep 30 00:13:23 crc kubenswrapper[4922]: I0930 00:13:23.908453 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-5fdvz" event={"ID":"bbc9a231-7e7e-43bd-ad0c-66a5642601c5","Type":"ContainerDied","Data":"f40caf44cde83805e7d5d16e343d01a16c0f7627bd698b3b85610bd6d4942095"} Sep 30 00:13:23 crc kubenswrapper[4922]: I0930 00:13:23.908795 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-5fdvz" event={"ID":"bbc9a231-7e7e-43bd-ad0c-66a5642601c5","Type":"ContainerStarted","Data":"e3e8f16b12172f7648d504b7e8f5f26b2470ec82065d960f681b0d2a0225133d"} Sep 30 00:13:25 crc kubenswrapper[4922]: I0930 00:13:25.462160 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-5fdvz" Sep 30 00:13:25 crc kubenswrapper[4922]: I0930 00:13:25.602376 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh4xr\" (UniqueName: \"kubernetes.io/projected/bbc9a231-7e7e-43bd-ad0c-66a5642601c5-kube-api-access-gh4xr\") pod \"bbc9a231-7e7e-43bd-ad0c-66a5642601c5\" (UID: \"bbc9a231-7e7e-43bd-ad0c-66a5642601c5\") " Sep 30 00:13:25 crc kubenswrapper[4922]: I0930 00:13:25.616125 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc9a231-7e7e-43bd-ad0c-66a5642601c5-kube-api-access-gh4xr" (OuterVolumeSpecName: "kube-api-access-gh4xr") pod "bbc9a231-7e7e-43bd-ad0c-66a5642601c5" (UID: "bbc9a231-7e7e-43bd-ad0c-66a5642601c5"). InnerVolumeSpecName "kube-api-access-gh4xr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:25 crc kubenswrapper[4922]: I0930 00:13:25.704696 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh4xr\" (UniqueName: \"kubernetes.io/projected/bbc9a231-7e7e-43bd-ad0c-66a5642601c5-kube-api-access-gh4xr\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:25 crc kubenswrapper[4922]: I0930 00:13:25.932443 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-5fdvz" event={"ID":"bbc9a231-7e7e-43bd-ad0c-66a5642601c5","Type":"ContainerDied","Data":"e3e8f16b12172f7648d504b7e8f5f26b2470ec82065d960f681b0d2a0225133d"} Sep 30 00:13:25 crc kubenswrapper[4922]: I0930 00:13:25.932659 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3e8f16b12172f7648d504b7e8f5f26b2470ec82065d960f681b0d2a0225133d" Sep 30 00:13:25 crc kubenswrapper[4922]: I0930 00:13:25.932595 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-5fdvz" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.409657 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-e99a-account-create-w5sqs"] Sep 30 00:13:32 crc kubenswrapper[4922]: E0930 00:13:32.410901 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc9a231-7e7e-43bd-ad0c-66a5642601c5" containerName="mariadb-database-create" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.410920 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc9a231-7e7e-43bd-ad0c-66a5642601c5" containerName="mariadb-database-create" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.411259 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbc9a231-7e7e-43bd-ad0c-66a5642601c5" containerName="mariadb-database-create" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.412224 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-e99a-account-create-w5sqs" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.415611 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.449155 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-e99a-account-create-w5sqs"] Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.549203 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvtfx\" (UniqueName: \"kubernetes.io/projected/06fe4f66-4cfc-4807-a952-ceb025f473c3-kube-api-access-hvtfx\") pod \"aodh-e99a-account-create-w5sqs\" (UID: \"06fe4f66-4cfc-4807-a952-ceb025f473c3\") " pod="openstack/aodh-e99a-account-create-w5sqs" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.651360 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvtfx\" (UniqueName: \"kubernetes.io/projected/06fe4f66-4cfc-4807-a952-ceb025f473c3-kube-api-access-hvtfx\") pod \"aodh-e99a-account-create-w5sqs\" (UID: \"06fe4f66-4cfc-4807-a952-ceb025f473c3\") " pod="openstack/aodh-e99a-account-create-w5sqs" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.677882 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvtfx\" (UniqueName: \"kubernetes.io/projected/06fe4f66-4cfc-4807-a952-ceb025f473c3-kube-api-access-hvtfx\") pod \"aodh-e99a-account-create-w5sqs\" (UID: \"06fe4f66-4cfc-4807-a952-ceb025f473c3\") " pod="openstack/aodh-e99a-account-create-w5sqs" Sep 30 00:13:32 crc kubenswrapper[4922]: I0930 00:13:32.736810 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-e99a-account-create-w5sqs" Sep 30 00:13:33 crc kubenswrapper[4922]: I0930 00:13:33.244067 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-e99a-account-create-w5sqs"] Sep 30 00:13:34 crc kubenswrapper[4922]: I0930 00:13:34.026734 4922 generic.go:334] "Generic (PLEG): container finished" podID="06fe4f66-4cfc-4807-a952-ceb025f473c3" containerID="785ae5af3d03fc458b06c8ebaa5f7754922c57882cd668430bb20a0798086bed" exitCode=0 Sep 30 00:13:34 crc kubenswrapper[4922]: I0930 00:13:34.026837 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-e99a-account-create-w5sqs" event={"ID":"06fe4f66-4cfc-4807-a952-ceb025f473c3","Type":"ContainerDied","Data":"785ae5af3d03fc458b06c8ebaa5f7754922c57882cd668430bb20a0798086bed"} Sep 30 00:13:34 crc kubenswrapper[4922]: I0930 00:13:34.027187 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-e99a-account-create-w5sqs" event={"ID":"06fe4f66-4cfc-4807-a952-ceb025f473c3","Type":"ContainerStarted","Data":"f85538869c77397aab435119e36b9473562414bcc90b069953b825eeb962d802"} Sep 30 00:13:35 crc kubenswrapper[4922]: I0930 00:13:35.602756 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-e99a-account-create-w5sqs" Sep 30 00:13:35 crc kubenswrapper[4922]: I0930 00:13:35.722087 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvtfx\" (UniqueName: \"kubernetes.io/projected/06fe4f66-4cfc-4807-a952-ceb025f473c3-kube-api-access-hvtfx\") pod \"06fe4f66-4cfc-4807-a952-ceb025f473c3\" (UID: \"06fe4f66-4cfc-4807-a952-ceb025f473c3\") " Sep 30 00:13:35 crc kubenswrapper[4922]: I0930 00:13:35.728334 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06fe4f66-4cfc-4807-a952-ceb025f473c3-kube-api-access-hvtfx" (OuterVolumeSpecName: "kube-api-access-hvtfx") pod "06fe4f66-4cfc-4807-a952-ceb025f473c3" (UID: "06fe4f66-4cfc-4807-a952-ceb025f473c3"). InnerVolumeSpecName "kube-api-access-hvtfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:35 crc kubenswrapper[4922]: I0930 00:13:35.824186 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvtfx\" (UniqueName: \"kubernetes.io/projected/06fe4f66-4cfc-4807-a952-ceb025f473c3-kube-api-access-hvtfx\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:36 crc kubenswrapper[4922]: I0930 00:13:36.055287 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-e99a-account-create-w5sqs" event={"ID":"06fe4f66-4cfc-4807-a952-ceb025f473c3","Type":"ContainerDied","Data":"f85538869c77397aab435119e36b9473562414bcc90b069953b825eeb962d802"} Sep 30 00:13:36 crc kubenswrapper[4922]: I0930 00:13:36.055332 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f85538869c77397aab435119e36b9473562414bcc90b069953b825eeb962d802" Sep 30 00:13:36 crc kubenswrapper[4922]: I0930 00:13:36.055352 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-e99a-account-create-w5sqs" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.844246 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-zf7ks"] Sep 30 00:13:37 crc kubenswrapper[4922]: E0930 00:13:37.845304 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fe4f66-4cfc-4807-a952-ceb025f473c3" containerName="mariadb-account-create" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.845331 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="06fe4f66-4cfc-4807-a952-ceb025f473c3" containerName="mariadb-account-create" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.845777 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="06fe4f66-4cfc-4807-a952-ceb025f473c3" containerName="mariadb-account-create" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.847104 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.851738 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.851882 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.852029 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-dkk75" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.856827 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-zf7ks"] Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.969249 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-scripts\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.969638 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6bpt\" (UniqueName: \"kubernetes.io/projected/215855a2-d48f-4456-bbd2-4bc1bc6e260a-kube-api-access-h6bpt\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.969786 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-combined-ca-bundle\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:37 crc kubenswrapper[4922]: I0930 00:13:37.970137 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-config-data\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.071584 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-config-data\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.071843 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-scripts\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.072700 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6bpt\" (UniqueName: \"kubernetes.io/projected/215855a2-d48f-4456-bbd2-4bc1bc6e260a-kube-api-access-h6bpt\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.072751 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-combined-ca-bundle\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.080557 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-combined-ca-bundle\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.080960 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-scripts\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.094237 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-config-data\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.096047 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6bpt\" (UniqueName: \"kubernetes.io/projected/215855a2-d48f-4456-bbd2-4bc1bc6e260a-kube-api-access-h6bpt\") pod \"aodh-db-sync-zf7ks\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.182115 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:38 crc kubenswrapper[4922]: I0930 00:13:38.728688 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-zf7ks"] Sep 30 00:13:39 crc kubenswrapper[4922]: I0930 00:13:39.082229 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-zf7ks" event={"ID":"215855a2-d48f-4456-bbd2-4bc1bc6e260a","Type":"ContainerStarted","Data":"a18e5e7cef37e23b9a2a14597e3d3d2db62077fef18f0a42dffcbac0a31ddf08"} Sep 30 00:13:43 crc kubenswrapper[4922]: I0930 00:13:43.391888 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 00:13:44 crc kubenswrapper[4922]: I0930 00:13:44.127524 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-zf7ks" event={"ID":"215855a2-d48f-4456-bbd2-4bc1bc6e260a","Type":"ContainerStarted","Data":"4778ae2cf51787f6e2f25e80b26e47f42f43ce5f8a21602c6650d5f2a338f577"} Sep 30 00:13:44 crc kubenswrapper[4922]: I0930 00:13:44.154722 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-zf7ks" podStartSLOduration=2.61071668 podStartE2EDuration="7.154691776s" podCreationTimestamp="2025-09-30 00:13:37 +0000 UTC" firstStartedPulling="2025-09-30 00:13:38.720909088 +0000 UTC m=+6423.031197901" lastFinishedPulling="2025-09-30 00:13:43.264884184 +0000 UTC m=+6427.575172997" observedRunningTime="2025-09-30 00:13:44.148172975 +0000 UTC m=+6428.458461798" watchObservedRunningTime="2025-09-30 00:13:44.154691776 +0000 UTC m=+6428.464980639" Sep 30 00:13:46 crc kubenswrapper[4922]: I0930 00:13:46.149509 4922 generic.go:334] "Generic (PLEG): container finished" podID="215855a2-d48f-4456-bbd2-4bc1bc6e260a" 
containerID="4778ae2cf51787f6e2f25e80b26e47f42f43ce5f8a21602c6650d5f2a338f577" exitCode=0 Sep 30 00:13:46 crc kubenswrapper[4922]: I0930 00:13:46.149561 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-zf7ks" event={"ID":"215855a2-d48f-4456-bbd2-4bc1bc6e260a","Type":"ContainerDied","Data":"4778ae2cf51787f6e2f25e80b26e47f42f43ce5f8a21602c6650d5f2a338f577"} Sep 30 00:13:47 crc kubenswrapper[4922]: I0930 00:13:47.788730 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:47 crc kubenswrapper[4922]: I0930 00:13:47.992749 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-combined-ca-bundle\") pod \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " Sep 30 00:13:47 crc kubenswrapper[4922]: I0930 00:13:47.993284 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6bpt\" (UniqueName: \"kubernetes.io/projected/215855a2-d48f-4456-bbd2-4bc1bc6e260a-kube-api-access-h6bpt\") pod \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " Sep 30 00:13:47 crc kubenswrapper[4922]: I0930 00:13:47.993336 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-config-data\") pod \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " Sep 30 00:13:47 crc kubenswrapper[4922]: I0930 00:13:47.993529 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-scripts\") pod \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\" (UID: \"215855a2-d48f-4456-bbd2-4bc1bc6e260a\") " Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.004961 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-scripts" (OuterVolumeSpecName: "scripts") pod "215855a2-d48f-4456-bbd2-4bc1bc6e260a" (UID: "215855a2-d48f-4456-bbd2-4bc1bc6e260a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.018718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/215855a2-d48f-4456-bbd2-4bc1bc6e260a-kube-api-access-h6bpt" (OuterVolumeSpecName: "kube-api-access-h6bpt") pod "215855a2-d48f-4456-bbd2-4bc1bc6e260a" (UID: "215855a2-d48f-4456-bbd2-4bc1bc6e260a"). InnerVolumeSpecName "kube-api-access-h6bpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.041934 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "215855a2-d48f-4456-bbd2-4bc1bc6e260a" (UID: "215855a2-d48f-4456-bbd2-4bc1bc6e260a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.086311 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-config-data" (OuterVolumeSpecName: "config-data") pod "215855a2-d48f-4456-bbd2-4bc1bc6e260a" (UID: "215855a2-d48f-4456-bbd2-4bc1bc6e260a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.096176 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.096229 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6bpt\" (UniqueName: \"kubernetes.io/projected/215855a2-d48f-4456-bbd2-4bc1bc6e260a-kube-api-access-h6bpt\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.096246 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.096257 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/215855a2-d48f-4456-bbd2-4bc1bc6e260a-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.170139 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-zf7ks" event={"ID":"215855a2-d48f-4456-bbd2-4bc1bc6e260a","Type":"ContainerDied","Data":"a18e5e7cef37e23b9a2a14597e3d3d2db62077fef18f0a42dffcbac0a31ddf08"} Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.170178 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a18e5e7cef37e23b9a2a14597e3d3d2db62077fef18f0a42dffcbac0a31ddf08" Sep 30 00:13:48 crc kubenswrapper[4922]: I0930 00:13:48.170185 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-zf7ks" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.459300 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Sep 30 00:13:52 crc kubenswrapper[4922]: E0930 00:13:52.460893 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215855a2-d48f-4456-bbd2-4bc1bc6e260a" containerName="aodh-db-sync" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.460914 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="215855a2-d48f-4456-bbd2-4bc1bc6e260a" containerName="aodh-db-sync" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.461411 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="215855a2-d48f-4456-bbd2-4bc1bc6e260a" containerName="aodh-db-sync" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.464544 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.467491 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.468796 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.468952 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-dkk75" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.491939 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.496019 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-scripts\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.496127 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-combined-ca-bundle\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.496193 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj8bf\" (UniqueName: \"kubernetes.io/projected/d77e021f-7f43-46d5-9458-6d96907107fa-kube-api-access-dj8bf\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.496354 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-config-data\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.597894 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-config-data\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.597994 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-scripts\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.598058 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-combined-ca-bundle\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.598105 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj8bf\" (UniqueName: \"kubernetes.io/projected/d77e021f-7f43-46d5-9458-6d96907107fa-kube-api-access-dj8bf\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: 
I0930 00:13:52.604919 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-combined-ca-bundle\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.607013 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-scripts\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.607621 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d77e021f-7f43-46d5-9458-6d96907107fa-config-data\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.615778 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj8bf\" (UniqueName: \"kubernetes.io/projected/d77e021f-7f43-46d5-9458-6d96907107fa-kube-api-access-dj8bf\") pod \"aodh-0\" (UID: \"d77e021f-7f43-46d5-9458-6d96907107fa\") " pod="openstack/aodh-0" Sep 30 00:13:52 crc kubenswrapper[4922]: I0930 00:13:52.787863 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Sep 30 00:13:53 crc kubenswrapper[4922]: I0930 00:13:53.333091 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Sep 30 00:13:54 crc kubenswrapper[4922]: I0930 00:13:54.251865 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d77e021f-7f43-46d5-9458-6d96907107fa","Type":"ContainerStarted","Data":"229556dbc55d202683e5f148d0f235e34eb2dbe69832770527d111cf3022f451"} Sep 30 00:13:54 crc kubenswrapper[4922]: I0930 00:13:54.252191 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d77e021f-7f43-46d5-9458-6d96907107fa","Type":"ContainerStarted","Data":"8d9b2574979f56be46767e1676ab1a01c716dfd00857c4dc4d666bbf8eb0cd65"} Sep 30 00:13:54 crc kubenswrapper[4922]: I0930 00:13:54.716088 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:54 crc kubenswrapper[4922]: I0930 00:13:54.716798 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-central-agent" containerID="cri-o://d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734" gracePeriod=30 Sep 30 00:13:54 crc kubenswrapper[4922]: I0930 00:13:54.716939 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="sg-core" containerID="cri-o://5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3" gracePeriod=30 Sep 30 00:13:54 crc kubenswrapper[4922]: I0930 00:13:54.717086 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-notification-agent" containerID="cri-o://4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a" gracePeriod=30 Sep 30 00:13:54 crc kubenswrapper[4922]: I0930 00:13:54.717115 4922 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="proxy-httpd" containerID="cri-o://a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142" gracePeriod=30 Sep 30 00:13:55 crc kubenswrapper[4922]: I0930 00:13:55.267706 4922 generic.go:334] "Generic (PLEG): container finished" podID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerID="a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142" exitCode=0 Sep 30 00:13:55 crc kubenswrapper[4922]: I0930 00:13:55.268092 4922 generic.go:334] "Generic (PLEG): container finished" podID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerID="5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3" exitCode=2 Sep 30 00:13:55 crc kubenswrapper[4922]: I0930 00:13:55.268104 4922 generic.go:334] "Generic (PLEG): container finished" podID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerID="d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734" exitCode=0 Sep 30 00:13:55 crc kubenswrapper[4922]: I0930 00:13:55.268127 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerDied","Data":"a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142"} Sep 30 00:13:55 crc kubenswrapper[4922]: I0930 00:13:55.268180 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerDied","Data":"5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3"} Sep 30 00:13:55 crc kubenswrapper[4922]: I0930 00:13:55.268195 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerDied","Data":"d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734"} Sep 30 00:13:56 crc kubenswrapper[4922]: I0930 00:13:56.057474 4922 scope.go:117] "RemoveContainer" containerID="e793d3dc7512e712bfa88fa021bf134db6fcb7feaba47be953447a5d7f3153aa" Sep 30 00:13:56 crc kubenswrapper[4922]: I0930 00:13:56.085088 4922 scope.go:117] "RemoveContainer" containerID="6323babe2626d6ebaea5764df276119afcfe204083fd03ff0abd4c5e21ad4eda" Sep 30 00:13:56 crc kubenswrapper[4922]: I0930 00:13:56.282135 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d77e021f-7f43-46d5-9458-6d96907107fa","Type":"ContainerStarted","Data":"ece858225c01337ea29775d9f9eb3dc3210bb5386db5b6c2f0a2c852a0a1236e"} Sep 30 00:13:57 crc kubenswrapper[4922]: I0930 00:13:57.295204 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d77e021f-7f43-46d5-9458-6d96907107fa","Type":"ContainerStarted","Data":"bc5d6b910196e7207f299b1ce9e46522e3ffbbb6d6d8ee3519f8044bceb95ba1"} Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.106067 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.125229 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-run-httpd\") pod \"36af16c1-0755-43ab-b45b-3c123dee0fb7\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.125301 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsvlr\" (UniqueName: \"kubernetes.io/projected/36af16c1-0755-43ab-b45b-3c123dee0fb7-kube-api-access-gsvlr\") pod \"36af16c1-0755-43ab-b45b-3c123dee0fb7\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.125406 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-config-data\") pod \"36af16c1-0755-43ab-b45b-3c123dee0fb7\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.125567 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-log-httpd\") pod \"36af16c1-0755-43ab-b45b-3c123dee0fb7\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.125656 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-sg-core-conf-yaml\") pod \"36af16c1-0755-43ab-b45b-3c123dee0fb7\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.125754 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-scripts\") pod \"36af16c1-0755-43ab-b45b-3c123dee0fb7\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.125812 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-combined-ca-bundle\") pod \"36af16c1-0755-43ab-b45b-3c123dee0fb7\" (UID: \"36af16c1-0755-43ab-b45b-3c123dee0fb7\") " Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.127930 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "36af16c1-0755-43ab-b45b-3c123dee0fb7" (UID: "36af16c1-0755-43ab-b45b-3c123dee0fb7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.128217 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "36af16c1-0755-43ab-b45b-3c123dee0fb7" (UID: "36af16c1-0755-43ab-b45b-3c123dee0fb7"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.156270 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-scripts" (OuterVolumeSpecName: "scripts") pod "36af16c1-0755-43ab-b45b-3c123dee0fb7" (UID: "36af16c1-0755-43ab-b45b-3c123dee0fb7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.162020 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36af16c1-0755-43ab-b45b-3c123dee0fb7-kube-api-access-gsvlr" (OuterVolumeSpecName: "kube-api-access-gsvlr") pod "36af16c1-0755-43ab-b45b-3c123dee0fb7" (UID: "36af16c1-0755-43ab-b45b-3c123dee0fb7"). InnerVolumeSpecName "kube-api-access-gsvlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.207218 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "36af16c1-0755-43ab-b45b-3c123dee0fb7" (UID: "36af16c1-0755-43ab-b45b-3c123dee0fb7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.230044 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.230086 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsvlr\" (UniqueName: \"kubernetes.io/projected/36af16c1-0755-43ab-b45b-3c123dee0fb7-kube-api-access-gsvlr\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.230101 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36af16c1-0755-43ab-b45b-3c123dee0fb7-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.230113 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.230127 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.247064 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36af16c1-0755-43ab-b45b-3c123dee0fb7" (UID: "36af16c1-0755-43ab-b45b-3c123dee0fb7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.295139 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-config-data" (OuterVolumeSpecName: "config-data") pod "36af16c1-0755-43ab-b45b-3c123dee0fb7" (UID: "36af16c1-0755-43ab-b45b-3c123dee0fb7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.307344 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"d77e021f-7f43-46d5-9458-6d96907107fa","Type":"ContainerStarted","Data":"57bd91ac7fd9252545993e05891717ef6c7d47767910998a009c9388fc4f388e"} Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.311170 4922 generic.go:334] "Generic (PLEG): container finished" podID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerID="4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a" exitCode=0 Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.311215 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerDied","Data":"4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a"} Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.311243 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36af16c1-0755-43ab-b45b-3c123dee0fb7","Type":"ContainerDied","Data":"7256b427863065a56f61b9f1f901075b85ce755d2585a93079c51f572188ef81"} Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.311260 4922 scope.go:117] "RemoveContainer" containerID="a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.311256 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.351569 4922 scope.go:117] "RemoveContainer" containerID="5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.394858 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.394903 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af16c1-0755-43ab-b45b-3c123dee0fb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.407018 4922 scope.go:117] "RemoveContainer" containerID="4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.418172 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=1.787209763 podStartE2EDuration="6.418151927s" podCreationTimestamp="2025-09-30 00:13:52 +0000 UTC" firstStartedPulling="2025-09-30 00:13:53.337497123 +0000 UTC m=+6437.647785936" lastFinishedPulling="2025-09-30 00:13:57.968439287 +0000 UTC m=+6442.278728100" observedRunningTime="2025-09-30 00:13:58.34986761 +0000 UTC m=+6442.660156423" watchObservedRunningTime="2025-09-30 00:13:58.418151927 +0000 UTC m=+6442.728440730" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.420234 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.444919 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.488637 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.489113 4922 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-central-agent" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489132 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-central-agent" Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.489170 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="sg-core" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489177 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="sg-core" Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.489189 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="proxy-httpd" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489195 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="proxy-httpd" Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.489217 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-notification-agent" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489223 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-notification-agent" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489437 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-central-agent" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489449 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="sg-core" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489460 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="proxy-httpd" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.489476 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" containerName="ceilometer-notification-agent" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.494289 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.494385 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.497816 4922 scope.go:117] "RemoveContainer" containerID="d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.498831 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.499618 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.541180 4922 scope.go:117] "RemoveContainer" containerID="a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142" Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.541699 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142\": container with ID starting with a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142 not found: ID does not exist" containerID="a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.541741 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142"} err="failed to get container status \"a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142\": rpc error: code = NotFound desc = could not find container \"a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142\": container with ID starting with a6265e669fc512a7ae8192ac96ce7debfbe4e71822993abf8589c0011a638142 not found: ID does not exist" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.541767 4922 scope.go:117] "RemoveContainer" containerID="5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3" Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.542081 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3\": container with ID starting with 5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3 not found: ID does not exist" containerID="5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.542298 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3"} err="failed to get container status \"5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3\": rpc error: code = NotFound desc = could not find container \"5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3\": container with ID starting with 5c5c77a63ab404befb9a12bacfada32ec5a987086f5d0e26da637d13a26794c3 not found: ID does not exist" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.542312 4922 scope.go:117] "RemoveContainer" containerID="4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a" Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.542526 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a\": container with ID starting with 4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a not found: ID does not exist" containerID="4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.542563 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a"} err="failed to get container status \"4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a\": rpc error: code = NotFound desc = could not find container \"4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a\": container with ID starting with 4409b527bbcc09c966ddf582a14da6440bc2f94dd8696e6c8c5d6823db25f14a not found: ID does not exist" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.542577 4922 scope.go:117] "RemoveContainer" containerID="d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734" Sep 30 00:13:58 crc kubenswrapper[4922]: E0930 00:13:58.542765 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734\": container with ID starting with d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734 not found: ID does not exist" containerID="d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.542782 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734"} err="failed to get container status \"d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734\": rpc error: code = NotFound desc = could not find container \"d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734\": container with ID starting with d59d7111a184937ff8ff3e544aa2597dbe7adb60907092c73675e63fdf0bf734 not found: ID does not exist" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.598175 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-scripts\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.598275 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.598359 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdkq5\" (UniqueName: \"kubernetes.io/projected/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-kube-api-access-xdkq5\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.598382 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-config-data\") pod \"ceilometer-0\" (UID: 
\"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.598509 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-run-httpd\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.598550 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-log-httpd\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.598600 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701210 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-run-httpd\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701280 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701304 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-log-httpd\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701502 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-scripts\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701590 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701679 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdkq5\" (UniqueName: \"kubernetes.io/projected/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-kube-api-access-xdkq5\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701713 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-config-data\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.701806 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-run-httpd\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.702202 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-log-httpd\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.704987 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.705155 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-scripts\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.705481 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.705869 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-config-data\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.719283 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdkq5\" (UniqueName: \"kubernetes.io/projected/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-kube-api-access-xdkq5\") pod \"ceilometer-0\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.831953 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.913354 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:13:58 crc kubenswrapper[4922]: I0930 00:13:58.913457 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:13:59 crc kubenswrapper[4922]: I0930 00:13:59.324179 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:13:59 crc kubenswrapper[4922]: W0930 00:13:59.335713 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda581fa7e_1dc9_4193_b6d9_d79c2e42c645.slice/crio-8df8288b3be1c864ffa32d50ffae4342c254dd9c15ff76a86a551f5bf07a6dc5 WatchSource:0}: Error finding container 8df8288b3be1c864ffa32d50ffae4342c254dd9c15ff76a86a551f5bf07a6dc5: Status 404 returned error can't find the container with id 8df8288b3be1c864ffa32d50ffae4342c254dd9c15ff76a86a551f5bf07a6dc5 Sep 30 00:14:00 crc kubenswrapper[4922]: I0930 00:14:00.337835 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerStarted","Data":"ae4270151a2ea055bac8f293c182a5b6d45e47f69eaa96c7ec398c2fdfd51f4b"} Sep 30 00:14:00 crc kubenswrapper[4922]: I0930 00:14:00.338231 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerStarted","Data":"8df8288b3be1c864ffa32d50ffae4342c254dd9c15ff76a86a551f5bf07a6dc5"} Sep 30 00:14:00 crc kubenswrapper[4922]: I0930 00:14:00.434487 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36af16c1-0755-43ab-b45b-3c123dee0fb7" path="/var/lib/kubelet/pods/36af16c1-0755-43ab-b45b-3c123dee0fb7/volumes" Sep 30 00:14:01 crc kubenswrapper[4922]: I0930 00:14:01.347808 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerStarted","Data":"64f285cccbd51655e387d847463f0d3ba2480b67ffa4296f8405c26cf77dccf7"} Sep 30 00:14:02 crc kubenswrapper[4922]: I0930 00:14:02.365644 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerStarted","Data":"523f183b9e15288264659cae0a81bcac3141cf2f5e21f8432616d9b47c4b498b"} Sep 30 00:14:04 crc kubenswrapper[4922]: I0930 00:14:04.387509 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerStarted","Data":"15031f05769bcef58802b29a5cbfd44f3d7f9bf79c0fe97c7cd6cf0066aaa3b7"} Sep 30 00:14:04 crc kubenswrapper[4922]: I0930 00:14:04.389106 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:14:04 crc kubenswrapper[4922]: I0930 00:14:04.408905 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/ceilometer-0" podStartSLOduration=2.467044604 podStartE2EDuration="6.408886935s" podCreationTimestamp="2025-09-30 00:13:58 +0000 UTC" firstStartedPulling="2025-09-30 00:13:59.338247978 +0000 UTC m=+6443.648536791" lastFinishedPulling="2025-09-30 00:14:03.280090309 +0000 UTC m=+6447.590379122" observedRunningTime="2025-09-30 00:14:04.404261741 +0000 UTC m=+6448.714550554" watchObservedRunningTime="2025-09-30 00:14:04.408886935 +0000 UTC m=+6448.719175748" Sep 30 00:14:04 crc kubenswrapper[4922]: I0930 00:14:04.975741 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-rmh6j"] Sep 30 00:14:04 crc kubenswrapper[4922]: I0930 00:14:04.977421 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-rmh6j" Sep 30 00:14:04 crc kubenswrapper[4922]: I0930 00:14:04.984809 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-rmh6j"] Sep 30 00:14:05 crc kubenswrapper[4922]: I0930 00:14:05.160848 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpwwx\" (UniqueName: \"kubernetes.io/projected/3ec28608-d912-4c45-b98b-3eb6bb4d4489-kube-api-access-lpwwx\") pod \"manila-db-create-rmh6j\" (UID: \"3ec28608-d912-4c45-b98b-3eb6bb4d4489\") " pod="openstack/manila-db-create-rmh6j" Sep 30 00:14:05 crc kubenswrapper[4922]: I0930 00:14:05.262604 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpwwx\" (UniqueName: \"kubernetes.io/projected/3ec28608-d912-4c45-b98b-3eb6bb4d4489-kube-api-access-lpwwx\") pod \"manila-db-create-rmh6j\" (UID: \"3ec28608-d912-4c45-b98b-3eb6bb4d4489\") " pod="openstack/manila-db-create-rmh6j" Sep 30 00:14:05 crc kubenswrapper[4922]: I0930 00:14:05.283609 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpwwx\" (UniqueName: \"kubernetes.io/projected/3ec28608-d912-4c45-b98b-3eb6bb4d4489-kube-api-access-lpwwx\") pod \"manila-db-create-rmh6j\" (UID: \"3ec28608-d912-4c45-b98b-3eb6bb4d4489\") " pod="openstack/manila-db-create-rmh6j" Sep 30 00:14:05 crc kubenswrapper[4922]: I0930 00:14:05.297713 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-rmh6j" Sep 30 00:14:05 crc kubenswrapper[4922]: I0930 00:14:05.922813 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-rmh6j"] Sep 30 00:14:06 crc kubenswrapper[4922]: I0930 00:14:06.419914 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-rmh6j" event={"ID":"3ec28608-d912-4c45-b98b-3eb6bb4d4489","Type":"ContainerStarted","Data":"ace20136dc7bff19a00e9ea78ce7add084664e19a4be42772b806a0f5a5693eb"} Sep 30 00:14:07 crc kubenswrapper[4922]: I0930 00:14:07.435744 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-rmh6j" event={"ID":"3ec28608-d912-4c45-b98b-3eb6bb4d4489","Type":"ContainerDied","Data":"efd4f2910aa83ecebae441b42237ffa1bd99d9aa4f5a881a6ea1e45083ec288b"} Sep 30 00:14:07 crc kubenswrapper[4922]: I0930 00:14:07.436211 4922 generic.go:334] "Generic (PLEG): container finished" podID="3ec28608-d912-4c45-b98b-3eb6bb4d4489" containerID="efd4f2910aa83ecebae441b42237ffa1bd99d9aa4f5a881a6ea1e45083ec288b" exitCode=0 Sep 30 00:14:08 crc kubenswrapper[4922]: I0930 00:14:08.932853 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-rmh6j" Sep 30 00:14:09 crc kubenswrapper[4922]: I0930 00:14:09.041688 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lpwwx\" (UniqueName: \"kubernetes.io/projected/3ec28608-d912-4c45-b98b-3eb6bb4d4489-kube-api-access-lpwwx\") pod \"3ec28608-d912-4c45-b98b-3eb6bb4d4489\" (UID: \"3ec28608-d912-4c45-b98b-3eb6bb4d4489\") " Sep 30 00:14:09 crc kubenswrapper[4922]: I0930 00:14:09.050319 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ec28608-d912-4c45-b98b-3eb6bb4d4489-kube-api-access-lpwwx" (OuterVolumeSpecName: "kube-api-access-lpwwx") pod "3ec28608-d912-4c45-b98b-3eb6bb4d4489" (UID: "3ec28608-d912-4c45-b98b-3eb6bb4d4489"). InnerVolumeSpecName "kube-api-access-lpwwx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:14:09 crc kubenswrapper[4922]: I0930 00:14:09.144776 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lpwwx\" (UniqueName: \"kubernetes.io/projected/3ec28608-d912-4c45-b98b-3eb6bb4d4489-kube-api-access-lpwwx\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:09 crc kubenswrapper[4922]: I0930 00:14:09.463911 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-rmh6j" event={"ID":"3ec28608-d912-4c45-b98b-3eb6bb4d4489","Type":"ContainerDied","Data":"ace20136dc7bff19a00e9ea78ce7add084664e19a4be42772b806a0f5a5693eb"} Sep 30 00:14:09 crc kubenswrapper[4922]: I0930 00:14:09.463960 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ace20136dc7bff19a00e9ea78ce7add084664e19a4be42772b806a0f5a5693eb" Sep 30 00:14:09 crc kubenswrapper[4922]: I0930 00:14:09.464028 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-rmh6j" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.087340 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-a833-account-create-tqf75"] Sep 30 00:14:15 crc kubenswrapper[4922]: E0930 00:14:15.090716 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ec28608-d912-4c45-b98b-3eb6bb4d4489" containerName="mariadb-database-create" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.090845 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ec28608-d912-4c45-b98b-3eb6bb4d4489" containerName="mariadb-database-create" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.091207 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ec28608-d912-4c45-b98b-3eb6bb4d4489" containerName="mariadb-database-create" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.092244 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-a833-account-create-tqf75" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.096112 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.104028 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-a833-account-create-tqf75"] Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.183175 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d9n6\" (UniqueName: \"kubernetes.io/projected/c579a7b6-dcc5-40e0-8661-d71093ffc017-kube-api-access-7d9n6\") pod \"manila-a833-account-create-tqf75\" (UID: \"c579a7b6-dcc5-40e0-8661-d71093ffc017\") " pod="openstack/manila-a833-account-create-tqf75" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.285024 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d9n6\" (UniqueName: \"kubernetes.io/projected/c579a7b6-dcc5-40e0-8661-d71093ffc017-kube-api-access-7d9n6\") pod \"manila-a833-account-create-tqf75\" (UID: \"c579a7b6-dcc5-40e0-8661-d71093ffc017\") " pod="openstack/manila-a833-account-create-tqf75" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.310897 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d9n6\" (UniqueName: \"kubernetes.io/projected/c579a7b6-dcc5-40e0-8661-d71093ffc017-kube-api-access-7d9n6\") pod \"manila-a833-account-create-tqf75\" (UID: \"c579a7b6-dcc5-40e0-8661-d71093ffc017\") " pod="openstack/manila-a833-account-create-tqf75" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.427760 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a833-account-create-tqf75" Sep 30 00:14:15 crc kubenswrapper[4922]: I0930 00:14:15.933810 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-a833-account-create-tqf75"] Sep 30 00:14:15 crc kubenswrapper[4922]: W0930 00:14:15.936117 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc579a7b6_dcc5_40e0_8661_d71093ffc017.slice/crio-01f8deabb31ac489a7660ba8a70d939d0218d1cec703a00dadc2d139a6c57e87 WatchSource:0}: Error finding container 01f8deabb31ac489a7660ba8a70d939d0218d1cec703a00dadc2d139a6c57e87: Status 404 returned error can't find the container with id 01f8deabb31ac489a7660ba8a70d939d0218d1cec703a00dadc2d139a6c57e87 Sep 30 00:14:16 crc kubenswrapper[4922]: I0930 00:14:16.546598 4922 generic.go:334] "Generic (PLEG): container finished" podID="c579a7b6-dcc5-40e0-8661-d71093ffc017" containerID="2da2f8eeb36ee3b8a406020fbee9252b30e0a89b09702d047c91da8ae89875d0" exitCode=0 Sep 30 00:14:16 crc kubenswrapper[4922]: I0930 00:14:16.546638 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a833-account-create-tqf75" event={"ID":"c579a7b6-dcc5-40e0-8661-d71093ffc017","Type":"ContainerDied","Data":"2da2f8eeb36ee3b8a406020fbee9252b30e0a89b09702d047c91da8ae89875d0"} Sep 30 00:14:16 crc kubenswrapper[4922]: I0930 00:14:16.546991 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a833-account-create-tqf75" event={"ID":"c579a7b6-dcc5-40e0-8661-d71093ffc017","Type":"ContainerStarted","Data":"01f8deabb31ac489a7660ba8a70d939d0218d1cec703a00dadc2d139a6c57e87"} Sep 30 00:14:18 crc kubenswrapper[4922]: I0930 00:14:18.030918 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-a833-account-create-tqf75" Sep 30 00:14:18 crc kubenswrapper[4922]: I0930 00:14:18.150150 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7d9n6\" (UniqueName: \"kubernetes.io/projected/c579a7b6-dcc5-40e0-8661-d71093ffc017-kube-api-access-7d9n6\") pod \"c579a7b6-dcc5-40e0-8661-d71093ffc017\" (UID: \"c579a7b6-dcc5-40e0-8661-d71093ffc017\") " Sep 30 00:14:18 crc kubenswrapper[4922]: I0930 00:14:18.157090 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c579a7b6-dcc5-40e0-8661-d71093ffc017-kube-api-access-7d9n6" (OuterVolumeSpecName: "kube-api-access-7d9n6") pod "c579a7b6-dcc5-40e0-8661-d71093ffc017" (UID: "c579a7b6-dcc5-40e0-8661-d71093ffc017"). InnerVolumeSpecName "kube-api-access-7d9n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:14:18 crc kubenswrapper[4922]: I0930 00:14:18.253643 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7d9n6\" (UniqueName: \"kubernetes.io/projected/c579a7b6-dcc5-40e0-8661-d71093ffc017-kube-api-access-7d9n6\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:18 crc kubenswrapper[4922]: I0930 00:14:18.568834 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-a833-account-create-tqf75" event={"ID":"c579a7b6-dcc5-40e0-8661-d71093ffc017","Type":"ContainerDied","Data":"01f8deabb31ac489a7660ba8a70d939d0218d1cec703a00dadc2d139a6c57e87"} Sep 30 00:14:18 crc kubenswrapper[4922]: I0930 00:14:18.568883 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01f8deabb31ac489a7660ba8a70d939d0218d1cec703a00dadc2d139a6c57e87" Sep 30 00:14:18 crc kubenswrapper[4922]: I0930 00:14:18.568948 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-a833-account-create-tqf75" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.409864 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-7njnn"] Sep 30 00:14:20 crc kubenswrapper[4922]: E0930 00:14:20.410727 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c579a7b6-dcc5-40e0-8661-d71093ffc017" containerName="mariadb-account-create" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.410784 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c579a7b6-dcc5-40e0-8661-d71093ffc017" containerName="mariadb-account-create" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.411047 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c579a7b6-dcc5-40e0-8661-d71093ffc017" containerName="mariadb-account-create" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.411967 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.415923 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-8zlt7" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.418176 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-7njnn"] Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.476969 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.502822 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-job-config-data\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.502982 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-combined-ca-bundle\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.503007 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rxq4\" (UniqueName: \"kubernetes.io/projected/bae59364-688d-42c9-9dad-6d8702b79983-kube-api-access-4rxq4\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.503048 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-config-data\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.605842 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-combined-ca-bundle\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.605904 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rxq4\" (UniqueName: \"kubernetes.io/projected/bae59364-688d-42c9-9dad-6d8702b79983-kube-api-access-4rxq4\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.605950 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-config-data\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.606153 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-job-config-data\") pod \"manila-db-sync-7njnn\" (UID: 
\"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.622162 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-combined-ca-bundle\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.635468 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-config-data\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.641961 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-job-config-data\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.651940 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rxq4\" (UniqueName: \"kubernetes.io/projected/bae59364-688d-42c9-9dad-6d8702b79983-kube-api-access-4rxq4\") pod \"manila-db-sync-7njnn\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:20 crc kubenswrapper[4922]: I0930 00:14:20.794250 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:21 crc kubenswrapper[4922]: I0930 00:14:21.564062 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-7njnn"] Sep 30 00:14:21 crc kubenswrapper[4922]: I0930 00:14:21.576058 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:14:21 crc kubenswrapper[4922]: I0930 00:14:21.616455 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7njnn" event={"ID":"bae59364-688d-42c9-9dad-6d8702b79983","Type":"ContainerStarted","Data":"e6b59241333d6f826229a6ca4b1e06de54dba67799432398ffef960d0bb0b78d"} Sep 30 00:14:27 crc kubenswrapper[4922]: I0930 00:14:27.694931 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7njnn" event={"ID":"bae59364-688d-42c9-9dad-6d8702b79983","Type":"ContainerStarted","Data":"8ab21c9a7dfc16e619a8df6b7cd381af299ecef44cb93b804ff4e9fc9da67e62"} Sep 30 00:14:27 crc kubenswrapper[4922]: I0930 00:14:27.720211 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-7njnn" podStartSLOduration=2.816043744 podStartE2EDuration="7.720187697s" podCreationTimestamp="2025-09-30 00:14:20 +0000 UTC" firstStartedPulling="2025-09-30 00:14:21.575876786 +0000 UTC m=+6465.886165599" lastFinishedPulling="2025-09-30 00:14:26.480020739 +0000 UTC m=+6470.790309552" observedRunningTime="2025-09-30 00:14:27.714811454 +0000 UTC m=+6472.025100297" watchObservedRunningTime="2025-09-30 00:14:27.720187697 +0000 UTC m=+6472.030476520" Sep 30 00:14:28 crc kubenswrapper[4922]: I0930 00:14:28.844099 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 00:14:28 crc kubenswrapper[4922]: I0930 00:14:28.912820 4922 patch_prober.go:28] interesting 
pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:14:28 crc kubenswrapper[4922]: I0930 00:14:28.912895 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:14:29 crc kubenswrapper[4922]: I0930 00:14:29.719883 4922 generic.go:334] "Generic (PLEG): container finished" podID="bae59364-688d-42c9-9dad-6d8702b79983" containerID="8ab21c9a7dfc16e619a8df6b7cd381af299ecef44cb93b804ff4e9fc9da67e62" exitCode=0 Sep 30 00:14:29 crc kubenswrapper[4922]: I0930 00:14:29.719962 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7njnn" event={"ID":"bae59364-688d-42c9-9dad-6d8702b79983","Type":"ContainerDied","Data":"8ab21c9a7dfc16e619a8df6b7cd381af299ecef44cb93b804ff4e9fc9da67e62"} Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.225953 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.347269 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-combined-ca-bundle\") pod \"bae59364-688d-42c9-9dad-6d8702b79983\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.347559 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-job-config-data\") pod \"bae59364-688d-42c9-9dad-6d8702b79983\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.347686 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rxq4\" (UniqueName: \"kubernetes.io/projected/bae59364-688d-42c9-9dad-6d8702b79983-kube-api-access-4rxq4\") pod \"bae59364-688d-42c9-9dad-6d8702b79983\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.347941 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-config-data\") pod \"bae59364-688d-42c9-9dad-6d8702b79983\" (UID: \"bae59364-688d-42c9-9dad-6d8702b79983\") " Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.352552 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "bae59364-688d-42c9-9dad-6d8702b79983" (UID: "bae59364-688d-42c9-9dad-6d8702b79983"). InnerVolumeSpecName "job-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.357367 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-config-data" (OuterVolumeSpecName: "config-data") pod "bae59364-688d-42c9-9dad-6d8702b79983" (UID: "bae59364-688d-42c9-9dad-6d8702b79983"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.361278 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bae59364-688d-42c9-9dad-6d8702b79983-kube-api-access-4rxq4" (OuterVolumeSpecName: "kube-api-access-4rxq4") pod "bae59364-688d-42c9-9dad-6d8702b79983" (UID: "bae59364-688d-42c9-9dad-6d8702b79983"). InnerVolumeSpecName "kube-api-access-4rxq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.383872 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bae59364-688d-42c9-9dad-6d8702b79983" (UID: "bae59364-688d-42c9-9dad-6d8702b79983"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.450901 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.450934 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.450945 4922 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/bae59364-688d-42c9-9dad-6d8702b79983-job-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.450954 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rxq4\" (UniqueName: \"kubernetes.io/projected/bae59364-688d-42c9-9dad-6d8702b79983-kube-api-access-4rxq4\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.745615 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-7njnn" event={"ID":"bae59364-688d-42c9-9dad-6d8702b79983","Type":"ContainerDied","Data":"e6b59241333d6f826229a6ca4b1e06de54dba67799432398ffef960d0bb0b78d"} Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.746083 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6b59241333d6f826229a6ca4b1e06de54dba67799432398ffef960d0bb0b78d" Sep 30 00:14:31 crc kubenswrapper[4922]: I0930 00:14:31.745703 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-7njnn" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.234054 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 00:14:32 crc kubenswrapper[4922]: E0930 00:14:32.235287 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bae59364-688d-42c9-9dad-6d8702b79983" containerName="manila-db-sync" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.235408 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="bae59364-688d-42c9-9dad-6d8702b79983" containerName="manila-db-sync" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.235777 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="bae59364-688d-42c9-9dad-6d8702b79983" containerName="manila-db-sync" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.237317 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.239912 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.240464 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.240666 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.242133 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-8zlt7" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.259906 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.262455 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.267986 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.284595 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.299974 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372300 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-config-data\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372403 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372465 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rr79\" (UniqueName: \"kubernetes.io/projected/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-kube-api-access-8rr79\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372502 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-ceph\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372526 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372549 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drkf6\" (UniqueName: \"kubernetes.io/projected/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-kube-api-access-drkf6\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372652 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-config-data\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372828 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372881 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-scripts\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372913 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372943 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.372958 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.373040 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.373079 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-scripts\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.418532 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6df58b796f-c6bcb"] Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.420384 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.441425 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6df58b796f-c6bcb"] Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.476717 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-config-data\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.478087 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.478254 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rr79\" (UniqueName: \"kubernetes.io/projected/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-kube-api-access-8rr79\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.478351 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-ceph\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.478497 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.478612 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drkf6\" (UniqueName: \"kubernetes.io/projected/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-kube-api-access-drkf6\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.479724 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-config-data\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.479973 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.480115 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-scripts\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " 
pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.480249 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.480416 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.480665 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.481686 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.481846 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-scripts\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.485278 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-config-data\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.487564 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.489502 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.489548 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.491027 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.491307 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-scripts\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.492040 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-scripts\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.496506 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.499049 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.507932 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.508703 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-ceph\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.518720 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rr79\" (UniqueName: \"kubernetes.io/projected/d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff-kube-api-access-8rr79\") pod \"manila-scheduler-0\" (UID: \"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff\") " pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.519745 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-config-data\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.522896 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drkf6\" (UniqueName: \"kubernetes.io/projected/223e6bbc-9cab-45dc-9975-bc2c3d87cd61-kube-api-access-drkf6\") pod \"manila-share-share1-0\" (UID: \"223e6bbc-9cab-45dc-9975-bc2c3d87cd61\") " pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.560957 4922 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.581021 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.583365 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rprlj\" (UniqueName: \"kubernetes.io/projected/c7a3e6c8-04d4-41f8-8360-a52436482036-kube-api-access-rprlj\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.583507 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-dns-svc\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.583587 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-nb\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.583757 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-config\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.583881 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.583894 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-sb\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.587691 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.591167 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.598430 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691024 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-config\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691107 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-scripts\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691175 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-logs\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691196 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-sb\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691235 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9whgz\" (UniqueName: \"kubernetes.io/projected/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-kube-api-access-9whgz\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691260 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-config-data\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691294 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691321 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rprlj\" (UniqueName: \"kubernetes.io/projected/c7a3e6c8-04d4-41f8-8360-a52436482036-kube-api-access-rprlj\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691356 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-etc-machine-id\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691403 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-dns-svc\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.691437 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-nb\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.695512 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-config-data-custom\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.696703 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-config\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.696919 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-sb\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.697463 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-nb\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.697477 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-dns-svc\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.714789 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rprlj\" (UniqueName: \"kubernetes.io/projected/c7a3e6c8-04d4-41f8-8360-a52436482036-kube-api-access-rprlj\") pod \"dnsmasq-dns-6df58b796f-c6bcb\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.779289 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.797603 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-scripts\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.797675 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-logs\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.797717 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9whgz\" (UniqueName: \"kubernetes.io/projected/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-kube-api-access-9whgz\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.797740 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-config-data\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.797769 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.797803 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-etc-machine-id\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.797880 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-config-data-custom\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.798139 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-logs\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.798504 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-etc-machine-id\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.801069 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-scripts\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " 
pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.802453 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-config-data\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.803135 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.803886 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-config-data-custom\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:32 crc kubenswrapper[4922]: I0930 00:14:32.816820 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9whgz\" (UniqueName: \"kubernetes.io/projected/5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea-kube-api-access-9whgz\") pod \"manila-api-0\" (UID: \"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea\") " pod="openstack/manila-api-0" Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.047054 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.206446 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.312235 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Sep 30 00:14:33 crc kubenswrapper[4922]: W0930 00:14:33.320627 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod223e6bbc_9cab_45dc_9975_bc2c3d87cd61.slice/crio-196b24cbea3972413f6879ec6db13c64d414246109f860afa921f4c34024a8e1 WatchSource:0}: Error finding container 196b24cbea3972413f6879ec6db13c64d414246109f860afa921f4c34024a8e1: Status 404 returned error can't find the container with id 196b24cbea3972413f6879ec6db13c64d414246109f860afa921f4c34024a8e1 Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.359901 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6df58b796f-c6bcb"] Sep 30 00:14:33 crc kubenswrapper[4922]: W0930 00:14:33.360131 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7a3e6c8_04d4_41f8_8360_a52436482036.slice/crio-b64d499721ae00c3c1d3b2ae3dbb61f4f1ec4acd6d2d9547a69c03f0725f74a1 WatchSource:0}: Error finding container b64d499721ae00c3c1d3b2ae3dbb61f4f1ec4acd6d2d9547a69c03f0725f74a1: Status 404 returned error can't find the container with id b64d499721ae00c3c1d3b2ae3dbb61f4f1ec4acd6d2d9547a69c03f0725f74a1 Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.657488 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.792313 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" 
event={"ID":"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea","Type":"ContainerStarted","Data":"4593d0addcbb7f414cefa479050e2bb2e5d961972f70a4519175deff4952db76"} Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.794117 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" event={"ID":"c7a3e6c8-04d4-41f8-8360-a52436482036","Type":"ContainerStarted","Data":"be99215aa86a6d75a2ad79e94ba90c636822e3187ef3b9640e6dc58bad2cce60"} Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.794161 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" event={"ID":"c7a3e6c8-04d4-41f8-8360-a52436482036","Type":"ContainerStarted","Data":"b64d499721ae00c3c1d3b2ae3dbb61f4f1ec4acd6d2d9547a69c03f0725f74a1"} Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.795310 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"223e6bbc-9cab-45dc-9975-bc2c3d87cd61","Type":"ContainerStarted","Data":"196b24cbea3972413f6879ec6db13c64d414246109f860afa921f4c34024a8e1"} Sep 30 00:14:33 crc kubenswrapper[4922]: I0930 00:14:33.796422 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff","Type":"ContainerStarted","Data":"1fe052886fcc8a4bd42c6e2aa16974c83b2e2d774c144bd7980154faa64ae418"} Sep 30 00:14:34 crc kubenswrapper[4922]: I0930 00:14:34.817469 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff","Type":"ContainerStarted","Data":"196b40d849da9f882d762a3a8b3d13c5ef7087222d18723c96fe5bb183aa67e0"} Sep 30 00:14:34 crc kubenswrapper[4922]: I0930 00:14:34.820051 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea","Type":"ContainerStarted","Data":"657a887fbc2e27267ccec140e0164a81d08fe7d034cb3a88ff33d5d148e6513b"} Sep 30 00:14:34 crc kubenswrapper[4922]: I0930 00:14:34.823561 4922 generic.go:334] "Generic (PLEG): container finished" podID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerID="be99215aa86a6d75a2ad79e94ba90c636822e3187ef3b9640e6dc58bad2cce60" exitCode=0 Sep 30 00:14:34 crc kubenswrapper[4922]: I0930 00:14:34.823600 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" event={"ID":"c7a3e6c8-04d4-41f8-8360-a52436482036","Type":"ContainerDied","Data":"be99215aa86a6d75a2ad79e94ba90c636822e3187ef3b9640e6dc58bad2cce60"} Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.841641 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff","Type":"ContainerStarted","Data":"cc099e19d777a5eaa0b5c336dd1f359983250af1b17176f13385dc65ea17609d"} Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.846040 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea","Type":"ContainerStarted","Data":"5f3887f4964cb98d8c0b36a084241bf9bbd9df02e2b12077c542ece76e60af9b"} Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.846570 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.853320 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" 
event={"ID":"c7a3e6c8-04d4-41f8-8360-a52436482036","Type":"ContainerStarted","Data":"735236fe1782b7bb1490ffe81bdf23d96a6847951560f89a59586f56d8af4466"} Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.853555 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.861887 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.0757501720000002 podStartE2EDuration="3.861864813s" podCreationTimestamp="2025-09-30 00:14:32 +0000 UTC" firstStartedPulling="2025-09-30 00:14:33.247181798 +0000 UTC m=+6477.557470611" lastFinishedPulling="2025-09-30 00:14:34.033296449 +0000 UTC m=+6478.343585252" observedRunningTime="2025-09-30 00:14:35.860046618 +0000 UTC m=+6480.170335431" watchObservedRunningTime="2025-09-30 00:14:35.861864813 +0000 UTC m=+6480.172153626" Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.877715 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" podStartSLOduration=3.877701624 podStartE2EDuration="3.877701624s" podCreationTimestamp="2025-09-30 00:14:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:14:35.876128435 +0000 UTC m=+6480.186417248" watchObservedRunningTime="2025-09-30 00:14:35.877701624 +0000 UTC m=+6480.187990427" Sep 30 00:14:35 crc kubenswrapper[4922]: I0930 00:14:35.895112 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.895077753 podStartE2EDuration="3.895077753s" podCreationTimestamp="2025-09-30 00:14:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:14:35.890338646 +0000 UTC m=+6480.200627459" watchObservedRunningTime="2025-09-30 00:14:35.895077753 +0000 UTC m=+6480.205366576" Sep 30 00:14:42 crc kubenswrapper[4922]: I0930 00:14:42.561934 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Sep 30 00:14:42 crc kubenswrapper[4922]: I0930 00:14:42.781684 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:14:42 crc kubenswrapper[4922]: I0930 00:14:42.863831 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-564f6b675c-t4h5f"] Sep 30 00:14:42 crc kubenswrapper[4922]: I0930 00:14:42.864088 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" podUID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerName="dnsmasq-dns" containerID="cri-o://b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d" gracePeriod=10 Sep 30 00:14:42 crc kubenswrapper[4922]: I0930 00:14:42.948567 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"223e6bbc-9cab-45dc-9975-bc2c3d87cd61","Type":"ContainerStarted","Data":"34d7cc9f164a47dbdff3f02035db936df040050b43113ec8d2a1bbe241414829"} Sep 30 00:14:42 crc kubenswrapper[4922]: I0930 00:14:42.948614 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" 
event={"ID":"223e6bbc-9cab-45dc-9975-bc2c3d87cd61","Type":"ContainerStarted","Data":"3b74f6a4fdb88865bf4046863cac59f42912cbadedd098ea30dc3348e6f6a133"} Sep 30 00:14:42 crc kubenswrapper[4922]: I0930 00:14:42.984733 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=2.943140756 podStartE2EDuration="10.984718229s" podCreationTimestamp="2025-09-30 00:14:32 +0000 UTC" firstStartedPulling="2025-09-30 00:14:33.32577884 +0000 UTC m=+6477.636067653" lastFinishedPulling="2025-09-30 00:14:41.367356303 +0000 UTC m=+6485.677645126" observedRunningTime="2025-09-30 00:14:42.976946787 +0000 UTC m=+6487.287235600" watchObservedRunningTime="2025-09-30 00:14:42.984718229 +0000 UTC m=+6487.295007042" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.481388 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.591312 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-sb\") pod \"f229dc10-13bc-4dba-bf6e-0889db6cf260\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.591796 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-dns-svc\") pod \"f229dc10-13bc-4dba-bf6e-0889db6cf260\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.591913 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-nb\") pod \"f229dc10-13bc-4dba-bf6e-0889db6cf260\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.591946 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-config\") pod \"f229dc10-13bc-4dba-bf6e-0889db6cf260\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.592033 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbnxk\" (UniqueName: \"kubernetes.io/projected/f229dc10-13bc-4dba-bf6e-0889db6cf260-kube-api-access-bbnxk\") pod \"f229dc10-13bc-4dba-bf6e-0889db6cf260\" (UID: \"f229dc10-13bc-4dba-bf6e-0889db6cf260\") " Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.609812 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f229dc10-13bc-4dba-bf6e-0889db6cf260-kube-api-access-bbnxk" (OuterVolumeSpecName: "kube-api-access-bbnxk") pod "f229dc10-13bc-4dba-bf6e-0889db6cf260" (UID: "f229dc10-13bc-4dba-bf6e-0889db6cf260"). InnerVolumeSpecName "kube-api-access-bbnxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.656623 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-config" (OuterVolumeSpecName: "config") pod "f229dc10-13bc-4dba-bf6e-0889db6cf260" (UID: "f229dc10-13bc-4dba-bf6e-0889db6cf260"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.666624 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f229dc10-13bc-4dba-bf6e-0889db6cf260" (UID: "f229dc10-13bc-4dba-bf6e-0889db6cf260"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.682837 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f229dc10-13bc-4dba-bf6e-0889db6cf260" (UID: "f229dc10-13bc-4dba-bf6e-0889db6cf260"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.685245 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f229dc10-13bc-4dba-bf6e-0889db6cf260" (UID: "f229dc10-13bc-4dba-bf6e-0889db6cf260"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.694766 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.694950 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.695034 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.695120 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f229dc10-13bc-4dba-bf6e-0889db6cf260-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.695191 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbnxk\" (UniqueName: \"kubernetes.io/projected/f229dc10-13bc-4dba-bf6e-0889db6cf260-kube-api-access-bbnxk\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.960517 4922 generic.go:334] "Generic (PLEG): container finished" podID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerID="b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d" exitCode=0 Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.961478 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.962607 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" event={"ID":"f229dc10-13bc-4dba-bf6e-0889db6cf260","Type":"ContainerDied","Data":"b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d"} Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.962673 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-564f6b675c-t4h5f" event={"ID":"f229dc10-13bc-4dba-bf6e-0889db6cf260","Type":"ContainerDied","Data":"7f8efa7866aa2f5c0c50e18158a38ecb525d6de75e0b025f5be5e63eb5e4e688"} Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.962695 4922 scope.go:117] "RemoveContainer" containerID="b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.989366 4922 scope.go:117] "RemoveContainer" containerID="70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139" Sep 30 00:14:43 crc kubenswrapper[4922]: I0930 00:14:43.997818 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-564f6b675c-t4h5f"] Sep 30 00:14:44 crc kubenswrapper[4922]: I0930 00:14:44.006161 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-564f6b675c-t4h5f"] Sep 30 00:14:44 crc kubenswrapper[4922]: I0930 00:14:44.024376 4922 scope.go:117] "RemoveContainer" containerID="b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d" Sep 30 00:14:44 crc kubenswrapper[4922]: E0930 00:14:44.025007 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d\": container with ID starting with b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d not found: ID does not exist" containerID="b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d" Sep 30 00:14:44 crc kubenswrapper[4922]: I0930 00:14:44.025054 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d"} err="failed to get container status \"b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d\": rpc error: code = NotFound desc = could not find container \"b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d\": container with ID starting with b8d9ecdd4456b928a6a355fef78b732199b21622e8fde799e99f724a5fa4a42d not found: ID does not exist" Sep 30 00:14:44 crc kubenswrapper[4922]: I0930 00:14:44.025082 4922 scope.go:117] "RemoveContainer" containerID="70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139" Sep 30 00:14:44 crc kubenswrapper[4922]: E0930 00:14:44.025418 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139\": container with ID starting with 70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139 not found: ID does not exist" containerID="70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139" Sep 30 00:14:44 crc kubenswrapper[4922]: I0930 00:14:44.025444 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139"} err="failed to get container status 
\"70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139\": rpc error: code = NotFound desc = could not find container \"70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139\": container with ID starting with 70255c5257fdea945b56687da55ef00174a0e4530938bb6ebc1f2f69505c6139 not found: ID does not exist" Sep 30 00:14:44 crc kubenswrapper[4922]: I0930 00:14:44.433809 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f229dc10-13bc-4dba-bf6e-0889db6cf260" path="/var/lib/kubelet/pods/f229dc10-13bc-4dba-bf6e-0889db6cf260/volumes" Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.058513 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.060077 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-central-agent" containerID="cri-o://ae4270151a2ea055bac8f293c182a5b6d45e47f69eaa96c7ec398c2fdfd51f4b" gracePeriod=30 Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.060152 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-notification-agent" containerID="cri-o://64f285cccbd51655e387d847463f0d3ba2480b67ffa4296f8405c26cf77dccf7" gracePeriod=30 Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.060196 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="proxy-httpd" containerID="cri-o://15031f05769bcef58802b29a5cbfd44f3d7f9bf79c0fe97c7cd6cf0066aaa3b7" gracePeriod=30 Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.060150 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="sg-core" containerID="cri-o://523f183b9e15288264659cae0a81bcac3141cf2f5e21f8432616d9b47c4b498b" gracePeriod=30 Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.998046 4922 generic.go:334] "Generic (PLEG): container finished" podID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerID="15031f05769bcef58802b29a5cbfd44f3d7f9bf79c0fe97c7cd6cf0066aaa3b7" exitCode=0 Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.998313 4922 generic.go:334] "Generic (PLEG): container finished" podID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerID="523f183b9e15288264659cae0a81bcac3141cf2f5e21f8432616d9b47c4b498b" exitCode=2 Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.998327 4922 generic.go:334] "Generic (PLEG): container finished" podID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerID="ae4270151a2ea055bac8f293c182a5b6d45e47f69eaa96c7ec398c2fdfd51f4b" exitCode=0 Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.998142 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerDied","Data":"15031f05769bcef58802b29a5cbfd44f3d7f9bf79c0fe97c7cd6cf0066aaa3b7"} Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.998364 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerDied","Data":"523f183b9e15288264659cae0a81bcac3141cf2f5e21f8432616d9b47c4b498b"} Sep 30 00:14:46 crc kubenswrapper[4922]: I0930 00:14:46.998381 4922 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerDied","Data":"ae4270151a2ea055bac8f293c182a5b6d45e47f69eaa96c7ec398c2fdfd51f4b"} Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.030146 4922 generic.go:334] "Generic (PLEG): container finished" podID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerID="64f285cccbd51655e387d847463f0d3ba2480b67ffa4296f8405c26cf77dccf7" exitCode=0 Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.030217 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerDied","Data":"64f285cccbd51655e387d847463f0d3ba2480b67ffa4296f8405c26cf77dccf7"} Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.314058 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.411581 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-combined-ca-bundle\") pod \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.411688 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-scripts\") pod \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.411721 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-sg-core-conf-yaml\") pod \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.411819 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdkq5\" (UniqueName: \"kubernetes.io/projected/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-kube-api-access-xdkq5\") pod \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.411925 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-config-data\") pod \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.412035 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-log-httpd\") pod \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.412086 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-run-httpd\") pod \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\" (UID: \"a581fa7e-1dc9-4193-b6d9-d79c2e42c645\") " Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.412960 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a581fa7e-1dc9-4193-b6d9-d79c2e42c645" (UID: "a581fa7e-1dc9-4193-b6d9-d79c2e42c645"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.413161 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a581fa7e-1dc9-4193-b6d9-d79c2e42c645" (UID: "a581fa7e-1dc9-4193-b6d9-d79c2e42c645"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.413885 4922 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.413914 4922 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.418722 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-kube-api-access-xdkq5" (OuterVolumeSpecName: "kube-api-access-xdkq5") pod "a581fa7e-1dc9-4193-b6d9-d79c2e42c645" (UID: "a581fa7e-1dc9-4193-b6d9-d79c2e42c645"). InnerVolumeSpecName "kube-api-access-xdkq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.424718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-scripts" (OuterVolumeSpecName: "scripts") pod "a581fa7e-1dc9-4193-b6d9-d79c2e42c645" (UID: "a581fa7e-1dc9-4193-b6d9-d79c2e42c645"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.464012 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a581fa7e-1dc9-4193-b6d9-d79c2e42c645" (UID: "a581fa7e-1dc9-4193-b6d9-d79c2e42c645"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.516782 4922 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.516809 4922 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.516823 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdkq5\" (UniqueName: \"kubernetes.io/projected/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-kube-api-access-xdkq5\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.517682 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a581fa7e-1dc9-4193-b6d9-d79c2e42c645" (UID: "a581fa7e-1dc9-4193-b6d9-d79c2e42c645"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.537029 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-config-data" (OuterVolumeSpecName: "config-data") pod "a581fa7e-1dc9-4193-b6d9-d79c2e42c645" (UID: "a581fa7e-1dc9-4193-b6d9-d79c2e42c645"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.619205 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:49 crc kubenswrapper[4922]: I0930 00:14:49.619266 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a581fa7e-1dc9-4193-b6d9-d79c2e42c645-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.053754 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a581fa7e-1dc9-4193-b6d9-d79c2e42c645","Type":"ContainerDied","Data":"8df8288b3be1c864ffa32d50ffae4342c254dd9c15ff76a86a551f5bf07a6dc5"} Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.053804 4922 scope.go:117] "RemoveContainer" containerID="15031f05769bcef58802b29a5cbfd44f3d7f9bf79c0fe97c7cd6cf0066aaa3b7" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.053925 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.096241 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.100254 4922 scope.go:117] "RemoveContainer" containerID="523f183b9e15288264659cae0a81bcac3141cf2f5e21f8432616d9b47c4b498b" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.130733 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.137532 4922 scope.go:117] "RemoveContainer" containerID="64f285cccbd51655e387d847463f0d3ba2480b67ffa4296f8405c26cf77dccf7" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.144593 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:14:50 crc kubenswrapper[4922]: E0930 00:14:50.145114 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-notification-agent" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145128 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-notification-agent" Sep 30 00:14:50 crc kubenswrapper[4922]: E0930 00:14:50.145151 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="sg-core" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145156 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="sg-core" Sep 30 00:14:50 crc kubenswrapper[4922]: E0930 00:14:50.145180 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerName="init" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145188 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerName="init" Sep 30 00:14:50 crc kubenswrapper[4922]: E0930 00:14:50.145201 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerName="dnsmasq-dns" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145208 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerName="dnsmasq-dns" Sep 30 00:14:50 crc kubenswrapper[4922]: E0930 00:14:50.145221 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="proxy-httpd" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145227 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="proxy-httpd" Sep 30 00:14:50 crc kubenswrapper[4922]: E0930 00:14:50.145245 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-central-agent" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145252 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-central-agent" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145542 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="proxy-httpd" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145564 4922 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="sg-core" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145581 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-notification-agent" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145593 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f229dc10-13bc-4dba-bf6e-0889db6cf260" containerName="dnsmasq-dns" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.145606 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" containerName="ceilometer-central-agent" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.147778 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.151916 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.152268 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.156573 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.190741 4922 scope.go:117] "RemoveContainer" containerID="ae4270151a2ea055bac8f293c182a5b6d45e47f69eaa96c7ec398c2fdfd51f4b" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.228918 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.228981 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.229000 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce31b701-95bd-47f4-a6aa-30209b38da1d-log-httpd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.229067 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-config-data\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.229209 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-scripts\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.229288 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-7gldd\" (UniqueName: \"kubernetes.io/projected/ce31b701-95bd-47f4-a6aa-30209b38da1d-kube-api-access-7gldd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.229332 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce31b701-95bd-47f4-a6aa-30209b38da1d-run-httpd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.331823 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-config-data\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.331898 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-scripts\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.331934 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gldd\" (UniqueName: \"kubernetes.io/projected/ce31b701-95bd-47f4-a6aa-30209b38da1d-kube-api-access-7gldd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.331960 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce31b701-95bd-47f4-a6aa-30209b38da1d-run-httpd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.332071 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.332102 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.332118 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce31b701-95bd-47f4-a6aa-30209b38da1d-log-httpd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.332543 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce31b701-95bd-47f4-a6aa-30209b38da1d-log-httpd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.332943 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ce31b701-95bd-47f4-a6aa-30209b38da1d-run-httpd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.336959 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.337510 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-scripts\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.337888 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.338294 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce31b701-95bd-47f4-a6aa-30209b38da1d-config-data\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.352958 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gldd\" (UniqueName: \"kubernetes.io/projected/ce31b701-95bd-47f4-a6aa-30209b38da1d-kube-api-access-7gldd\") pod \"ceilometer-0\" (UID: \"ce31b701-95bd-47f4-a6aa-30209b38da1d\") " pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.434353 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a581fa7e-1dc9-4193-b6d9-d79c2e42c645" path="/var/lib/kubelet/pods/a581fa7e-1dc9-4193-b6d9-d79c2e42c645/volumes" Sep 30 00:14:50 crc kubenswrapper[4922]: I0930 00:14:50.468682 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 00:14:50 crc kubenswrapper[4922]: W0930 00:14:50.995687 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce31b701_95bd_47f4_a6aa_30209b38da1d.slice/crio-97779d0954a0307b4cc9786ecce0e25ebaa4df4fced3ce4110d58a46939e39e3 WatchSource:0}: Error finding container 97779d0954a0307b4cc9786ecce0e25ebaa4df4fced3ce4110d58a46939e39e3: Status 404 returned error can't find the container with id 97779d0954a0307b4cc9786ecce0e25ebaa4df4fced3ce4110d58a46939e39e3 Sep 30 00:14:51 crc kubenswrapper[4922]: I0930 00:14:51.002474 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 00:14:51 crc kubenswrapper[4922]: I0930 00:14:51.065326 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce31b701-95bd-47f4-a6aa-30209b38da1d","Type":"ContainerStarted","Data":"97779d0954a0307b4cc9786ecce0e25ebaa4df4fced3ce4110d58a46939e39e3"} Sep 30 00:14:52 crc kubenswrapper[4922]: I0930 00:14:52.081041 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce31b701-95bd-47f4-a6aa-30209b38da1d","Type":"ContainerStarted","Data":"057922d837b1ffceb76cd1536fef6d221322397b99d22c5546ce1271dd0c1991"} Sep 30 00:14:52 crc kubenswrapper[4922]: I0930 00:14:52.589222 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Sep 30 00:14:53 crc kubenswrapper[4922]: I0930 00:14:53.095509 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce31b701-95bd-47f4-a6aa-30209b38da1d","Type":"ContainerStarted","Data":"8e3f8e61ba2d5d3c361cfe84045c820bd751c10dbec644fd25ea60376c8debbd"} Sep 30 00:14:54 crc kubenswrapper[4922]: I0930 00:14:54.115941 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce31b701-95bd-47f4-a6aa-30209b38da1d","Type":"ContainerStarted","Data":"28c03be75dc0546ad1bef38d93415e2b499602eff3117e3c9058d3757b2f82ae"} Sep 30 00:14:54 crc kubenswrapper[4922]: I0930 00:14:54.170594 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Sep 30 00:14:54 crc kubenswrapper[4922]: I0930 00:14:54.240041 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Sep 30 00:14:54 crc kubenswrapper[4922]: I0930 00:14:54.441669 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Sep 30 00:14:55 crc kubenswrapper[4922]: I0930 00:14:55.130614 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ce31b701-95bd-47f4-a6aa-30209b38da1d","Type":"ContainerStarted","Data":"8c0f05d0cbe281eaa3e8aa977188fae9e5419b3db6d9eda8359d57abdea4edbb"} Sep 30 00:14:55 crc kubenswrapper[4922]: I0930 00:14:55.130991 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 00:14:55 crc kubenswrapper[4922]: I0930 00:14:55.159989 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.65363223 podStartE2EDuration="5.159971222s" podCreationTimestamp="2025-09-30 00:14:50 +0000 UTC" firstStartedPulling="2025-09-30 00:14:51.000571467 +0000 UTC m=+6495.310860290" lastFinishedPulling="2025-09-30 00:14:54.506910469 +0000 UTC m=+6498.817199282" 
observedRunningTime="2025-09-30 00:14:55.148344195 +0000 UTC m=+6499.458633018" watchObservedRunningTime="2025-09-30 00:14:55.159971222 +0000 UTC m=+6499.470260035" Sep 30 00:14:56 crc kubenswrapper[4922]: I0930 00:14:56.203289 4922 scope.go:117] "RemoveContainer" containerID="4a7b189fb16904438688a60a0be32235d4508701ef61c499971b9387cdf48875" Sep 30 00:14:56 crc kubenswrapper[4922]: I0930 00:14:56.408452 4922 scope.go:117] "RemoveContainer" containerID="3a500f8647fac29dcb8f4e187fbbf4b43fa61eeae8d714722c10ed6162a6d8a1" Sep 30 00:14:58 crc kubenswrapper[4922]: I0930 00:14:58.913050 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:14:58 crc kubenswrapper[4922]: I0930 00:14:58.913597 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:14:58 crc kubenswrapper[4922]: I0930 00:14:58.913695 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:14:58 crc kubenswrapper[4922]: I0930 00:14:58.914945 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6f6de9491a4e56e81d9e945f5a62e49838ba5953cd0f64477adcdc4f24b42ea"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:14:58 crc kubenswrapper[4922]: I0930 00:14:58.915059 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://a6f6de9491a4e56e81d9e945f5a62e49838ba5953cd0f64477adcdc4f24b42ea" gracePeriod=600 Sep 30 00:14:59 crc kubenswrapper[4922]: I0930 00:14:59.184517 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="a6f6de9491a4e56e81d9e945f5a62e49838ba5953cd0f64477adcdc4f24b42ea" exitCode=0 Sep 30 00:14:59 crc kubenswrapper[4922]: I0930 00:14:59.184779 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"a6f6de9491a4e56e81d9e945f5a62e49838ba5953cd0f64477adcdc4f24b42ea"} Sep 30 00:14:59 crc kubenswrapper[4922]: I0930 00:14:59.184951 4922 scope.go:117] "RemoveContainer" containerID="93bacd40bec2ab78453090503966cb53d247aed37deed5b201da9f8af6e85ff4" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.172869 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh"] Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.175328 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.178171 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.178866 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.186748 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh"] Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.216525 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"} Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.265765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2nnn\" (UniqueName: \"kubernetes.io/projected/34125185-0edb-4028-b601-e85f01414b4d-kube-api-access-b2nnn\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.265981 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34125185-0edb-4028-b601-e85f01414b4d-secret-volume\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.266100 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34125185-0edb-4028-b601-e85f01414b4d-config-volume\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.368084 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2nnn\" (UniqueName: \"kubernetes.io/projected/34125185-0edb-4028-b601-e85f01414b4d-kube-api-access-b2nnn\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.368272 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34125185-0edb-4028-b601-e85f01414b4d-secret-volume\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.368432 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34125185-0edb-4028-b601-e85f01414b4d-config-volume\") pod \"collect-profiles-29319855-884bh\" (UID: 
\"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.369460 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34125185-0edb-4028-b601-e85f01414b4d-config-volume\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.377469 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34125185-0edb-4028-b601-e85f01414b4d-secret-volume\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.383656 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2nnn\" (UniqueName: \"kubernetes.io/projected/34125185-0edb-4028-b601-e85f01414b4d-kube-api-access-b2nnn\") pod \"collect-profiles-29319855-884bh\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.494179 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:00 crc kubenswrapper[4922]: I0930 00:15:00.924222 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh"] Sep 30 00:15:01 crc kubenswrapper[4922]: I0930 00:15:01.228331 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" event={"ID":"34125185-0edb-4028-b601-e85f01414b4d","Type":"ContainerStarted","Data":"4b437d7fb287ae27f38e87ee7192722a71089302a3d18b4868e8a5994e35c826"} Sep 30 00:15:01 crc kubenswrapper[4922]: I0930 00:15:01.228733 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" event={"ID":"34125185-0edb-4028-b601-e85f01414b4d","Type":"ContainerStarted","Data":"a2977d1054bd53a8d9e3772a3c0fd7ffbc38840d419f9ffbcbcedb065572786f"} Sep 30 00:15:01 crc kubenswrapper[4922]: I0930 00:15:01.256644 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" podStartSLOduration=1.256618967 podStartE2EDuration="1.256618967s" podCreationTimestamp="2025-09-30 00:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:15:01.241231417 +0000 UTC m=+6505.551520230" watchObservedRunningTime="2025-09-30 00:15:01.256618967 +0000 UTC m=+6505.566907780" Sep 30 00:15:02 crc kubenswrapper[4922]: I0930 00:15:02.248789 4922 generic.go:334] "Generic (PLEG): container finished" podID="34125185-0edb-4028-b601-e85f01414b4d" containerID="4b437d7fb287ae27f38e87ee7192722a71089302a3d18b4868e8a5994e35c826" exitCode=0 Sep 30 00:15:02 crc kubenswrapper[4922]: I0930 00:15:02.248898 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" 
event={"ID":"34125185-0edb-4028-b601-e85f01414b4d","Type":"ContainerDied","Data":"4b437d7fb287ae27f38e87ee7192722a71089302a3d18b4868e8a5994e35c826"} Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.738209 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.842223 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34125185-0edb-4028-b601-e85f01414b4d-secret-volume\") pod \"34125185-0edb-4028-b601-e85f01414b4d\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.842281 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2nnn\" (UniqueName: \"kubernetes.io/projected/34125185-0edb-4028-b601-e85f01414b4d-kube-api-access-b2nnn\") pod \"34125185-0edb-4028-b601-e85f01414b4d\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.842435 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34125185-0edb-4028-b601-e85f01414b4d-config-volume\") pod \"34125185-0edb-4028-b601-e85f01414b4d\" (UID: \"34125185-0edb-4028-b601-e85f01414b4d\") " Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.843542 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34125185-0edb-4028-b601-e85f01414b4d-config-volume" (OuterVolumeSpecName: "config-volume") pod "34125185-0edb-4028-b601-e85f01414b4d" (UID: "34125185-0edb-4028-b601-e85f01414b4d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.849470 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34125185-0edb-4028-b601-e85f01414b4d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "34125185-0edb-4028-b601-e85f01414b4d" (UID: "34125185-0edb-4028-b601-e85f01414b4d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.850221 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34125185-0edb-4028-b601-e85f01414b4d-kube-api-access-b2nnn" (OuterVolumeSpecName: "kube-api-access-b2nnn") pod "34125185-0edb-4028-b601-e85f01414b4d" (UID: "34125185-0edb-4028-b601-e85f01414b4d"). InnerVolumeSpecName "kube-api-access-b2nnn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.945148 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34125185-0edb-4028-b601-e85f01414b4d-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.945206 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2nnn\" (UniqueName: \"kubernetes.io/projected/34125185-0edb-4028-b601-e85f01414b4d-kube-api-access-b2nnn\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:03 crc kubenswrapper[4922]: I0930 00:15:03.945229 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34125185-0edb-4028-b601-e85f01414b4d-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:04 crc kubenswrapper[4922]: I0930 00:15:04.288198 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" event={"ID":"34125185-0edb-4028-b601-e85f01414b4d","Type":"ContainerDied","Data":"a2977d1054bd53a8d9e3772a3c0fd7ffbc38840d419f9ffbcbcedb065572786f"} Sep 30 00:15:04 crc kubenswrapper[4922]: I0930 00:15:04.288638 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2977d1054bd53a8d9e3772a3c0fd7ffbc38840d419f9ffbcbcedb065572786f" Sep 30 00:15:04 crc kubenswrapper[4922]: I0930 00:15:04.288784 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh" Sep 30 00:15:04 crc kubenswrapper[4922]: I0930 00:15:04.322515 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s"] Sep 30 00:15:04 crc kubenswrapper[4922]: I0930 00:15:04.330414 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319810-5r88s"] Sep 30 00:15:04 crc kubenswrapper[4922]: I0930 00:15:04.446237 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1574c25c-5b6e-4b42-97b4-b37e85b535c0" path="/var/lib/kubelet/pods/1574c25c-5b6e-4b42-97b4-b37e85b535c0/volumes" Sep 30 00:15:20 crc kubenswrapper[4922]: I0930 00:15:20.477667 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.003924 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cb57b699f-rzj7b"] Sep 30 00:15:43 crc kubenswrapper[4922]: E0930 00:15:43.004955 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34125185-0edb-4028-b601-e85f01414b4d" containerName="collect-profiles" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.004971 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="34125185-0edb-4028-b601-e85f01414b4d" containerName="collect-profiles" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.005189 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="34125185-0edb-4028-b601-e85f01414b4d" containerName="collect-profiles" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.006373 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.017773 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.034547 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cb57b699f-rzj7b"] Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.073709 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-config\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.074059 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.074085 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llkg9\" (UniqueName: \"kubernetes.io/projected/90834b09-0fcb-4675-aff1-8de809768ff2-kube-api-access-llkg9\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.074136 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.074498 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-openstack-cell1\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.074601 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-dns-svc\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.176808 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llkg9\" (UniqueName: \"kubernetes.io/projected/90834b09-0fcb-4675-aff1-8de809768ff2-kube-api-access-llkg9\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.176881 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: 
\"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.176953 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-openstack-cell1\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.176981 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-dns-svc\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.177065 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-config\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.177112 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.178318 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-sb\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.178360 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-dns-svc\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.178411 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-openstack-cell1\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.178545 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-config\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.178629 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-nb\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 
00:15:43.200767 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llkg9\" (UniqueName: \"kubernetes.io/projected/90834b09-0fcb-4675-aff1-8de809768ff2-kube-api-access-llkg9\") pod \"dnsmasq-dns-6cb57b699f-rzj7b\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.375563 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:43 crc kubenswrapper[4922]: I0930 00:15:43.888253 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cb57b699f-rzj7b"] Sep 30 00:15:44 crc kubenswrapper[4922]: I0930 00:15:44.797055 4922 generic.go:334] "Generic (PLEG): container finished" podID="90834b09-0fcb-4675-aff1-8de809768ff2" containerID="a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b" exitCode=0 Sep 30 00:15:44 crc kubenswrapper[4922]: I0930 00:15:44.797449 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" event={"ID":"90834b09-0fcb-4675-aff1-8de809768ff2","Type":"ContainerDied","Data":"a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b"} Sep 30 00:15:44 crc kubenswrapper[4922]: I0930 00:15:44.797480 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" event={"ID":"90834b09-0fcb-4675-aff1-8de809768ff2","Type":"ContainerStarted","Data":"5056d6e707b5347401b7d5c752cc0e202cae0f6e7d68e95300cf781273574d81"} Sep 30 00:15:45 crc kubenswrapper[4922]: I0930 00:15:45.809737 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" event={"ID":"90834b09-0fcb-4675-aff1-8de809768ff2","Type":"ContainerStarted","Data":"4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb"} Sep 30 00:15:45 crc kubenswrapper[4922]: I0930 00:15:45.810250 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:45 crc kubenswrapper[4922]: I0930 00:15:45.850549 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" podStartSLOduration=3.8505293050000002 podStartE2EDuration="3.850529305s" podCreationTimestamp="2025-09-30 00:15:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:15:45.845971643 +0000 UTC m=+6550.156260466" watchObservedRunningTime="2025-09-30 00:15:45.850529305 +0000 UTC m=+6550.160818118" Sep 30 00:15:47 crc kubenswrapper[4922]: I0930 00:15:47.985834 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kqr4j"] Sep 30 00:15:47 crc kubenswrapper[4922]: I0930 00:15:47.993681 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.009905 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kqr4j"] Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.089572 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzhmx\" (UniqueName: \"kubernetes.io/projected/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-kube-api-access-hzhmx\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.089868 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-catalog-content\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.092040 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-utilities\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.196432 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzhmx\" (UniqueName: \"kubernetes.io/projected/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-kube-api-access-hzhmx\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.196775 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-catalog-content\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.196956 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-utilities\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.198698 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-utilities\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.200618 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-catalog-content\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.223353 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hzhmx\" (UniqueName: \"kubernetes.io/projected/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-kube-api-access-hzhmx\") pod \"redhat-operators-kqr4j\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.351366 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:15:48 crc kubenswrapper[4922]: I0930 00:15:48.903556 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kqr4j"] Sep 30 00:15:48 crc kubenswrapper[4922]: W0930 00:15:48.904099 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84ed6bdb_32d1_4ea5_bb8f_b2c3131eb2c9.slice/crio-db1974f0e50865e7e488613f061312674c3132494649b353b42fa4dbb2cfc9b5 WatchSource:0}: Error finding container db1974f0e50865e7e488613f061312674c3132494649b353b42fa4dbb2cfc9b5: Status 404 returned error can't find the container with id db1974f0e50865e7e488613f061312674c3132494649b353b42fa4dbb2cfc9b5 Sep 30 00:15:49 crc kubenswrapper[4922]: I0930 00:15:49.858217 4922 generic.go:334] "Generic (PLEG): container finished" podID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerID="55d4ee68645dd0eeda0f32651ba2b20b6e93de6e63c5649dca51cece70235075" exitCode=0 Sep 30 00:15:49 crc kubenswrapper[4922]: I0930 00:15:49.858340 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqr4j" event={"ID":"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9","Type":"ContainerDied","Data":"55d4ee68645dd0eeda0f32651ba2b20b6e93de6e63c5649dca51cece70235075"} Sep 30 00:15:49 crc kubenswrapper[4922]: I0930 00:15:49.859639 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqr4j" event={"ID":"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9","Type":"ContainerStarted","Data":"db1974f0e50865e7e488613f061312674c3132494649b353b42fa4dbb2cfc9b5"} Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.370849 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8bdwc"] Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.373211 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.383220 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8bdwc"] Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.452628 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-catalog-content\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.452670 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkd56\" (UniqueName: \"kubernetes.io/projected/784be6c7-e015-442c-930a-c3112a355ac9-kube-api-access-dkd56\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.452691 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-utilities\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.554968 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-catalog-content\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.555014 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkd56\" (UniqueName: \"kubernetes.io/projected/784be6c7-e015-442c-930a-c3112a355ac9-kube-api-access-dkd56\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.555041 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-utilities\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.555485 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-catalog-content\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.555549 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-utilities\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.574962 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dkd56\" (UniqueName: \"kubernetes.io/projected/784be6c7-e015-442c-930a-c3112a355ac9-kube-api-access-dkd56\") pod \"certified-operators-8bdwc\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:50 crc kubenswrapper[4922]: I0930 00:15:50.699488 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:15:51 crc kubenswrapper[4922]: I0930 00:15:51.222866 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8bdwc"] Sep 30 00:15:51 crc kubenswrapper[4922]: I0930 00:15:51.881770 4922 generic.go:334] "Generic (PLEG): container finished" podID="784be6c7-e015-442c-930a-c3112a355ac9" containerID="fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b" exitCode=0 Sep 30 00:15:51 crc kubenswrapper[4922]: I0930 00:15:51.881844 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bdwc" event={"ID":"784be6c7-e015-442c-930a-c3112a355ac9","Type":"ContainerDied","Data":"fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b"} Sep 30 00:15:51 crc kubenswrapper[4922]: I0930 00:15:51.882418 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bdwc" event={"ID":"784be6c7-e015-442c-930a-c3112a355ac9","Type":"ContainerStarted","Data":"da9638ad9b2e0d4764ad814cecc33efd83aa326299c52c0ca9431bb1033fe08c"} Sep 30 00:15:51 crc kubenswrapper[4922]: I0930 00:15:51.888067 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqr4j" event={"ID":"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9","Type":"ContainerStarted","Data":"6b9ce2feda0943496f715a20b573043fe3cb201758c0e162f1d08da5a7053b43"} Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.377890 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.456761 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6df58b796f-c6bcb"] Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.469719 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" podUID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerName="dnsmasq-dns" containerID="cri-o://735236fe1782b7bb1490ffe81bdf23d96a6847951560f89a59586f56d8af4466" gracePeriod=10 Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.685944 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79889664b7-xq248"] Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.688013 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.694288 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79889664b7-xq248"] Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.722743 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s765g\" (UniqueName: \"kubernetes.io/projected/5929b9d1-6b81-44e2-8ecd-9bda69d61400-kube-api-access-s765g\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.723149 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-ovsdbserver-nb\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.723540 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-config\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.723748 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-openstack-cell1\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.723857 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-dns-svc\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.723988 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-ovsdbserver-sb\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.824990 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-config\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.825050 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-openstack-cell1\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.825072 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-dns-svc\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.825101 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-ovsdbserver-sb\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.825145 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s765g\" (UniqueName: \"kubernetes.io/projected/5929b9d1-6b81-44e2-8ecd-9bda69d61400-kube-api-access-s765g\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.825198 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-ovsdbserver-nb\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.826118 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-ovsdbserver-nb\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.826197 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-config\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.826350 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-dns-svc\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.826656 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-ovsdbserver-sb\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.827202 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/5929b9d1-6b81-44e2-8ecd-9bda69d61400-openstack-cell1\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:53 crc kubenswrapper[4922]: I0930 00:15:53.849344 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s765g\" (UniqueName: 
\"kubernetes.io/projected/5929b9d1-6b81-44e2-8ecd-9bda69d61400-kube-api-access-s765g\") pod \"dnsmasq-dns-79889664b7-xq248\" (UID: \"5929b9d1-6b81-44e2-8ecd-9bda69d61400\") " pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.011606 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.069295 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-jf6bs"] Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.079514 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-jf6bs"] Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.446905 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9" path="/var/lib/kubelet/pods/9d90a7b7-ceca-4008-a43e-6f76a5fc4ae9/volumes" Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.754313 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79889664b7-xq248"] Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.938618 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bdwc" event={"ID":"784be6c7-e015-442c-930a-c3112a355ac9","Type":"ContainerStarted","Data":"89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1"} Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.946742 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79889664b7-xq248" event={"ID":"5929b9d1-6b81-44e2-8ecd-9bda69d61400","Type":"ContainerStarted","Data":"4c81340b0d5df6f7faa75b1860f6e776394fc63c0fb300783ec05e39b23bbb9e"} Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.949195 4922 generic.go:334] "Generic (PLEG): container finished" podID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerID="735236fe1782b7bb1490ffe81bdf23d96a6847951560f89a59586f56d8af4466" exitCode=0 Sep 30 00:15:54 crc kubenswrapper[4922]: I0930 00:15:54.949240 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" event={"ID":"c7a3e6c8-04d4-41f8-8360-a52436482036","Type":"ContainerDied","Data":"735236fe1782b7bb1490ffe81bdf23d96a6847951560f89a59586f56d8af4466"} Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.367806 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.470580 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-nb\") pod \"c7a3e6c8-04d4-41f8-8360-a52436482036\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.470685 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-dns-svc\") pod \"c7a3e6c8-04d4-41f8-8360-a52436482036\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.470750 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-sb\") pod \"c7a3e6c8-04d4-41f8-8360-a52436482036\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.470821 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rprlj\" (UniqueName: \"kubernetes.io/projected/c7a3e6c8-04d4-41f8-8360-a52436482036-kube-api-access-rprlj\") pod \"c7a3e6c8-04d4-41f8-8360-a52436482036\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.471080 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-config\") pod \"c7a3e6c8-04d4-41f8-8360-a52436482036\" (UID: \"c7a3e6c8-04d4-41f8-8360-a52436482036\") " Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.488545 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7a3e6c8-04d4-41f8-8360-a52436482036-kube-api-access-rprlj" (OuterVolumeSpecName: "kube-api-access-rprlj") pod "c7a3e6c8-04d4-41f8-8360-a52436482036" (UID: "c7a3e6c8-04d4-41f8-8360-a52436482036"). InnerVolumeSpecName "kube-api-access-rprlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.530949 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c7a3e6c8-04d4-41f8-8360-a52436482036" (UID: "c7a3e6c8-04d4-41f8-8360-a52436482036"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.536865 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-config" (OuterVolumeSpecName: "config") pod "c7a3e6c8-04d4-41f8-8360-a52436482036" (UID: "c7a3e6c8-04d4-41f8-8360-a52436482036"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.538386 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c7a3e6c8-04d4-41f8-8360-a52436482036" (UID: "c7a3e6c8-04d4-41f8-8360-a52436482036"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.551571 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c7a3e6c8-04d4-41f8-8360-a52436482036" (UID: "c7a3e6c8-04d4-41f8-8360-a52436482036"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.573949 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.573980 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.573994 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.574007 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c7a3e6c8-04d4-41f8-8360-a52436482036-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.574019 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rprlj\" (UniqueName: \"kubernetes.io/projected/c7a3e6c8-04d4-41f8-8360-a52436482036-kube-api-access-rprlj\") on node \"crc\" DevicePath \"\"" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.959438 4922 generic.go:334] "Generic (PLEG): container finished" podID="5929b9d1-6b81-44e2-8ecd-9bda69d61400" containerID="8d1bbc45018b6126c298e30e05c40e86dc02a9afe2b1773b8d203554a24df0a9" exitCode=0 Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.959496 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79889664b7-xq248" event={"ID":"5929b9d1-6b81-44e2-8ecd-9bda69d61400","Type":"ContainerDied","Data":"8d1bbc45018b6126c298e30e05c40e86dc02a9afe2b1773b8d203554a24df0a9"} Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.962072 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" event={"ID":"c7a3e6c8-04d4-41f8-8360-a52436482036","Type":"ContainerDied","Data":"b64d499721ae00c3c1d3b2ae3dbb61f4f1ec4acd6d2d9547a69c03f0725f74a1"} Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.962122 4922 scope.go:117] "RemoveContainer" containerID="735236fe1782b7bb1490ffe81bdf23d96a6847951560f89a59586f56d8af4466" Sep 30 00:15:55 crc kubenswrapper[4922]: I0930 00:15:55.962325 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6df58b796f-c6bcb" Sep 30 00:15:56 crc kubenswrapper[4922]: I0930 00:15:56.008616 4922 scope.go:117] "RemoveContainer" containerID="be99215aa86a6d75a2ad79e94ba90c636822e3187ef3b9640e6dc58bad2cce60" Sep 30 00:15:56 crc kubenswrapper[4922]: I0930 00:15:56.119485 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6df58b796f-c6bcb"] Sep 30 00:15:56 crc kubenswrapper[4922]: I0930 00:15:56.128314 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6df58b796f-c6bcb"] Sep 30 00:15:56 crc kubenswrapper[4922]: I0930 00:15:56.436866 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7a3e6c8-04d4-41f8-8360-a52436482036" path="/var/lib/kubelet/pods/c7a3e6c8-04d4-41f8-8360-a52436482036/volumes" Sep 30 00:15:56 crc kubenswrapper[4922]: I0930 00:15:56.549458 4922 scope.go:117] "RemoveContainer" containerID="75c33abaa1c299d844021f0b2769504ff502ef71909f3243d882bb42b4967184" Sep 30 00:15:56 crc kubenswrapper[4922]: I0930 00:15:56.799500 4922 scope.go:117] "RemoveContainer" containerID="4638d6f25be9c6e0b973b24298661a25674cbfc556658026f7ef1f1d0726b7d2" Sep 30 00:15:57 crc kubenswrapper[4922]: I0930 00:15:57.997478 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79889664b7-xq248" event={"ID":"5929b9d1-6b81-44e2-8ecd-9bda69d61400","Type":"ContainerStarted","Data":"aff8ce77b2d964b769beabb9247e5319d70385b05e4153d8889891e89c321b29"} Sep 30 00:15:57 crc kubenswrapper[4922]: I0930 00:15:57.998234 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:15:58 crc kubenswrapper[4922]: I0930 00:15:58.050842 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79889664b7-xq248" podStartSLOduration=5.050278134 podStartE2EDuration="5.050278134s" podCreationTimestamp="2025-09-30 00:15:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:15:58.028114707 +0000 UTC m=+6562.338403540" watchObservedRunningTime="2025-09-30 00:15:58.050278134 +0000 UTC m=+6562.360566947" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.790461 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj"] Sep 30 00:15:59 crc kubenswrapper[4922]: E0930 00:15:59.791688 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerName="dnsmasq-dns" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.791709 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerName="dnsmasq-dns" Sep 30 00:15:59 crc kubenswrapper[4922]: E0930 00:15:59.791725 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerName="init" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.791736 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerName="init" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.792148 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7a3e6c8-04d4-41f8-8360-a52436482036" containerName="dnsmasq-dns" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.793458 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.795759 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.795817 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.796569 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.796834 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.808224 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj"] Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.976564 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.976866 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.977106 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hnvj\" (UniqueName: \"kubernetes.io/projected/8471899a-f22f-4f56-b48d-8644475f56ec-kube-api-access-9hnvj\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.977255 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:15:59 crc kubenswrapper[4922]: I0930 00:15:59.977514 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.079590 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.080076 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.080151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hnvj\" (UniqueName: \"kubernetes.io/projected/8471899a-f22f-4f56-b48d-8644475f56ec-kube-api-access-9hnvj\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.080216 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.080337 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.089172 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.089593 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.091189 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc 
kubenswrapper[4922]: I0930 00:16:00.096604 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.100624 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hnvj\" (UniqueName: \"kubernetes.io/projected/8471899a-f22f-4f56-b48d-8644475f56ec-kube-api-access-9hnvj\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.116239 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:00 crc kubenswrapper[4922]: I0930 00:16:00.845775 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj"] Sep 30 00:16:00 crc kubenswrapper[4922]: W0930 00:16:00.853058 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8471899a_f22f_4f56_b48d_8644475f56ec.slice/crio-ffc014baa6af072a78005cd1a0a189282960fb433463b8788be0ace52acdf437 WatchSource:0}: Error finding container ffc014baa6af072a78005cd1a0a189282960fb433463b8788be0ace52acdf437: Status 404 returned error can't find the container with id ffc014baa6af072a78005cd1a0a189282960fb433463b8788be0ace52acdf437 Sep 30 00:16:01 crc kubenswrapper[4922]: I0930 00:16:01.030362 4922 generic.go:334] "Generic (PLEG): container finished" podID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerID="6b9ce2feda0943496f715a20b573043fe3cb201758c0e162f1d08da5a7053b43" exitCode=0 Sep 30 00:16:01 crc kubenswrapper[4922]: I0930 00:16:01.030435 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqr4j" event={"ID":"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9","Type":"ContainerDied","Data":"6b9ce2feda0943496f715a20b573043fe3cb201758c0e162f1d08da5a7053b43"} Sep 30 00:16:01 crc kubenswrapper[4922]: I0930 00:16:01.031981 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" event={"ID":"8471899a-f22f-4f56-b48d-8644475f56ec","Type":"ContainerStarted","Data":"ffc014baa6af072a78005cd1a0a189282960fb433463b8788be0ace52acdf437"} Sep 30 00:16:01 crc kubenswrapper[4922]: I0930 00:16:01.034450 4922 generic.go:334] "Generic (PLEG): container finished" podID="784be6c7-e015-442c-930a-c3112a355ac9" containerID="89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1" exitCode=0 Sep 30 00:16:01 crc kubenswrapper[4922]: I0930 00:16:01.034476 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bdwc" event={"ID":"784be6c7-e015-442c-930a-c3112a355ac9","Type":"ContainerDied","Data":"89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1"} Sep 30 00:16:02 crc kubenswrapper[4922]: I0930 00:16:02.048339 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqr4j" 
event={"ID":"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9","Type":"ContainerStarted","Data":"ab8f0ae113a99df4623b08f983b1820c75d88368f5b28b3d4ce686297112b4e4"} Sep 30 00:16:02 crc kubenswrapper[4922]: I0930 00:16:02.074274 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kqr4j" podStartSLOduration=3.41950815 podStartE2EDuration="15.074198713s" podCreationTimestamp="2025-09-30 00:15:47 +0000 UTC" firstStartedPulling="2025-09-30 00:15:49.86090751 +0000 UTC m=+6554.171196333" lastFinishedPulling="2025-09-30 00:16:01.515598083 +0000 UTC m=+6565.825886896" observedRunningTime="2025-09-30 00:16:02.066749679 +0000 UTC m=+6566.377038512" watchObservedRunningTime="2025-09-30 00:16:02.074198713 +0000 UTC m=+6566.384487556" Sep 30 00:16:03 crc kubenswrapper[4922]: I0930 00:16:03.062674 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bdwc" event={"ID":"784be6c7-e015-442c-930a-c3112a355ac9","Type":"ContainerStarted","Data":"9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393"} Sep 30 00:16:03 crc kubenswrapper[4922]: I0930 00:16:03.086447 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8bdwc" podStartSLOduration=3.082377175 podStartE2EDuration="13.086429859s" podCreationTimestamp="2025-09-30 00:15:50 +0000 UTC" firstStartedPulling="2025-09-30 00:15:51.884522513 +0000 UTC m=+6556.194811336" lastFinishedPulling="2025-09-30 00:16:01.888575207 +0000 UTC m=+6566.198864020" observedRunningTime="2025-09-30 00:16:03.082081951 +0000 UTC m=+6567.392370774" watchObservedRunningTime="2025-09-30 00:16:03.086429859 +0000 UTC m=+6567.396718672" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.012637 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79889664b7-xq248" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.084593 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb57b699f-rzj7b"] Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.084867 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" podUID="90834b09-0fcb-4675-aff1-8de809768ff2" containerName="dnsmasq-dns" containerID="cri-o://4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb" gracePeriod=10 Sep 30 00:16:04 crc kubenswrapper[4922]: E0930 00:16:04.383136 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90834b09_0fcb_4675_aff1_8de809768ff2.slice/crio-conmon-4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90834b09_0fcb_4675_aff1_8de809768ff2.slice/crio-4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb.scope\": RecentStats: unable to find data in memory cache]" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.681431 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.799288 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-sb\") pod \"90834b09-0fcb-4675-aff1-8de809768ff2\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.799972 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-nb\") pod \"90834b09-0fcb-4675-aff1-8de809768ff2\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.800016 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-openstack-cell1\") pod \"90834b09-0fcb-4675-aff1-8de809768ff2\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.800055 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-dns-svc\") pod \"90834b09-0fcb-4675-aff1-8de809768ff2\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.800086 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llkg9\" (UniqueName: \"kubernetes.io/projected/90834b09-0fcb-4675-aff1-8de809768ff2-kube-api-access-llkg9\") pod \"90834b09-0fcb-4675-aff1-8de809768ff2\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.800186 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-config\") pod \"90834b09-0fcb-4675-aff1-8de809768ff2\" (UID: \"90834b09-0fcb-4675-aff1-8de809768ff2\") " Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.806207 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90834b09-0fcb-4675-aff1-8de809768ff2-kube-api-access-llkg9" (OuterVolumeSpecName: "kube-api-access-llkg9") pod "90834b09-0fcb-4675-aff1-8de809768ff2" (UID: "90834b09-0fcb-4675-aff1-8de809768ff2"). InnerVolumeSpecName "kube-api-access-llkg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.873445 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "90834b09-0fcb-4675-aff1-8de809768ff2" (UID: "90834b09-0fcb-4675-aff1-8de809768ff2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.875369 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "90834b09-0fcb-4675-aff1-8de809768ff2" (UID: "90834b09-0fcb-4675-aff1-8de809768ff2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.886674 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "90834b09-0fcb-4675-aff1-8de809768ff2" (UID: "90834b09-0fcb-4675-aff1-8de809768ff2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.902337 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.902379 4922 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.902596 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llkg9\" (UniqueName: \"kubernetes.io/projected/90834b09-0fcb-4675-aff1-8de809768ff2-kube-api-access-llkg9\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.902615 4922 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.914033 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "90834b09-0fcb-4675-aff1-8de809768ff2" (UID: "90834b09-0fcb-4675-aff1-8de809768ff2"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:16:04 crc kubenswrapper[4922]: I0930 00:16:04.923136 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-config" (OuterVolumeSpecName: "config") pod "90834b09-0fcb-4675-aff1-8de809768ff2" (UID: "90834b09-0fcb-4675-aff1-8de809768ff2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.006377 4922 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-config\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.007015 4922 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/90834b09-0fcb-4675-aff1-8de809768ff2-openstack-cell1\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.104242 4922 generic.go:334] "Generic (PLEG): container finished" podID="90834b09-0fcb-4675-aff1-8de809768ff2" containerID="4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb" exitCode=0 Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.104293 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" event={"ID":"90834b09-0fcb-4675-aff1-8de809768ff2","Type":"ContainerDied","Data":"4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb"} Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.104314 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.104329 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cb57b699f-rzj7b" event={"ID":"90834b09-0fcb-4675-aff1-8de809768ff2","Type":"ContainerDied","Data":"5056d6e707b5347401b7d5c752cc0e202cae0f6e7d68e95300cf781273574d81"} Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.104356 4922 scope.go:117] "RemoveContainer" containerID="4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.148732 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cb57b699f-rzj7b"] Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.150746 4922 scope.go:117] "RemoveContainer" containerID="a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.160503 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cb57b699f-rzj7b"] Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.181161 4922 scope.go:117] "RemoveContainer" containerID="4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb" Sep 30 00:16:05 crc kubenswrapper[4922]: E0930 00:16:05.181869 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb\": container with ID starting with 4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb not found: ID does not exist" containerID="4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.181912 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb"} err="failed to get container status \"4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb\": rpc error: code = NotFound desc = could not find container \"4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb\": container with ID starting with 4960b37ede8cb4a32eaffe2350bec47ee830b93ac4a040e049044356587eecdb not found: ID 
does not exist" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.181939 4922 scope.go:117] "RemoveContainer" containerID="a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b" Sep 30 00:16:05 crc kubenswrapper[4922]: E0930 00:16:05.182164 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b\": container with ID starting with a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b not found: ID does not exist" containerID="a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b" Sep 30 00:16:05 crc kubenswrapper[4922]: I0930 00:16:05.182183 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b"} err="failed to get container status \"a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b\": rpc error: code = NotFound desc = could not find container \"a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b\": container with ID starting with a3febfaed628c8dfd5311fc2b5bac12922e8afda00e9dc5e854de0f06efa5b6b not found: ID does not exist" Sep 30 00:16:06 crc kubenswrapper[4922]: I0930 00:16:06.046017 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-136c-account-create-cfqr8"] Sep 30 00:16:06 crc kubenswrapper[4922]: I0930 00:16:06.058938 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-136c-account-create-cfqr8"] Sep 30 00:16:06 crc kubenswrapper[4922]: I0930 00:16:06.437807 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f" path="/var/lib/kubelet/pods/3e6dd6f0-e7fb-4ab5-8e50-6937937e2d4f/volumes" Sep 30 00:16:06 crc kubenswrapper[4922]: I0930 00:16:06.439924 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90834b09-0fcb-4675-aff1-8de809768ff2" path="/var/lib/kubelet/pods/90834b09-0fcb-4675-aff1-8de809768ff2/volumes" Sep 30 00:16:08 crc kubenswrapper[4922]: I0930 00:16:08.352049 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:16:08 crc kubenswrapper[4922]: I0930 00:16:08.352840 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:16:08 crc kubenswrapper[4922]: I0930 00:16:08.404569 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:16:09 crc kubenswrapper[4922]: I0930 00:16:09.213312 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:16:09 crc kubenswrapper[4922]: I0930 00:16:09.265794 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kqr4j"] Sep 30 00:16:10 crc kubenswrapper[4922]: I0930 00:16:10.700411 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:16:10 crc kubenswrapper[4922]: I0930 00:16:10.700781 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:16:11 crc kubenswrapper[4922]: I0930 00:16:11.171052 4922 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-kqr4j" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerName="registry-server" containerID="cri-o://ab8f0ae113a99df4623b08f983b1820c75d88368f5b28b3d4ce686297112b4e4" gracePeriod=2 Sep 30 00:16:11 crc kubenswrapper[4922]: I0930 00:16:11.754774 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-8bdwc" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="registry-server" probeResult="failure" output=< Sep 30 00:16:11 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:16:11 crc kubenswrapper[4922]: > Sep 30 00:16:12 crc kubenswrapper[4922]: I0930 00:16:12.184185 4922 generic.go:334] "Generic (PLEG): container finished" podID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerID="ab8f0ae113a99df4623b08f983b1820c75d88368f5b28b3d4ce686297112b4e4" exitCode=0 Sep 30 00:16:12 crc kubenswrapper[4922]: I0930 00:16:12.184321 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqr4j" event={"ID":"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9","Type":"ContainerDied","Data":"ab8f0ae113a99df4623b08f983b1820c75d88368f5b28b3d4ce686297112b4e4"} Sep 30 00:16:13 crc kubenswrapper[4922]: I0930 00:16:13.027491 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-qcvft"] Sep 30 00:16:13 crc kubenswrapper[4922]: I0930 00:16:13.037690 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-qcvft"] Sep 30 00:16:14 crc kubenswrapper[4922]: I0930 00:16:14.452713 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63605a12-e656-4baa-8b39-208131d28a5c" path="/var/lib/kubelet/pods/63605a12-e656-4baa-8b39-208131d28a5c/volumes" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.226687 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kqr4j" event={"ID":"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9","Type":"ContainerDied","Data":"db1974f0e50865e7e488613f061312674c3132494649b353b42fa4dbb2cfc9b5"} Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.227166 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db1974f0e50865e7e488613f061312674c3132494649b353b42fa4dbb2cfc9b5" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.260843 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.361121 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-utilities\") pod \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.361331 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzhmx\" (UniqueName: \"kubernetes.io/projected/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-kube-api-access-hzhmx\") pod \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.361368 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-catalog-content\") pod \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\" (UID: \"84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9\") " Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.363368 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-utilities" (OuterVolumeSpecName: "utilities") pod "84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" (UID: "84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.366286 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-kube-api-access-hzhmx" (OuterVolumeSpecName: "kube-api-access-hzhmx") pod "84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" (UID: "84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9"). InnerVolumeSpecName "kube-api-access-hzhmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.447986 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" (UID: "84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.464957 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.465036 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzhmx\" (UniqueName: \"kubernetes.io/projected/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-kube-api-access-hzhmx\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:15 crc kubenswrapper[4922]: I0930 00:16:15.465066 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:16 crc kubenswrapper[4922]: I0930 00:16:16.242813 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kqr4j" Sep 30 00:16:16 crc kubenswrapper[4922]: I0930 00:16:16.242884 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" event={"ID":"8471899a-f22f-4f56-b48d-8644475f56ec","Type":"ContainerStarted","Data":"08143fe612bd00b1af5f507cebab09b6c5894e071782c7951ec41e7cbe1faf68"} Sep 30 00:16:16 crc kubenswrapper[4922]: I0930 00:16:16.281209 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" podStartSLOduration=2.957814768 podStartE2EDuration="17.281177248s" podCreationTimestamp="2025-09-30 00:15:59 +0000 UTC" firstStartedPulling="2025-09-30 00:16:00.855794103 +0000 UTC m=+6565.166082906" lastFinishedPulling="2025-09-30 00:16:15.179156573 +0000 UTC m=+6579.489445386" observedRunningTime="2025-09-30 00:16:16.27073451 +0000 UTC m=+6580.581023343" watchObservedRunningTime="2025-09-30 00:16:16.281177248 +0000 UTC m=+6580.591466101" Sep 30 00:16:16 crc kubenswrapper[4922]: I0930 00:16:16.311774 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kqr4j"] Sep 30 00:16:16 crc kubenswrapper[4922]: I0930 00:16:16.323824 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kqr4j"] Sep 30 00:16:16 crc kubenswrapper[4922]: I0930 00:16:16.433535 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" path="/var/lib/kubelet/pods/84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9/volumes" Sep 30 00:16:20 crc kubenswrapper[4922]: I0930 00:16:20.754863 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:16:20 crc kubenswrapper[4922]: I0930 00:16:20.841460 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:16:21 crc kubenswrapper[4922]: I0930 00:16:21.575140 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8bdwc"] Sep 30 00:16:22 crc kubenswrapper[4922]: I0930 00:16:22.314731 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8bdwc" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="registry-server" containerID="cri-o://9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393" gracePeriod=2 Sep 30 00:16:22 crc kubenswrapper[4922]: I0930 00:16:22.857291 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:16:22 crc kubenswrapper[4922]: I0930 00:16:22.958170 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-catalog-content\") pod \"784be6c7-e015-442c-930a-c3112a355ac9\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " Sep 30 00:16:22 crc kubenswrapper[4922]: I0930 00:16:22.958265 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkd56\" (UniqueName: \"kubernetes.io/projected/784be6c7-e015-442c-930a-c3112a355ac9-kube-api-access-dkd56\") pod \"784be6c7-e015-442c-930a-c3112a355ac9\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " Sep 30 00:16:22 crc kubenswrapper[4922]: I0930 00:16:22.958444 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-utilities\") pod \"784be6c7-e015-442c-930a-c3112a355ac9\" (UID: \"784be6c7-e015-442c-930a-c3112a355ac9\") " Sep 30 00:16:22 crc kubenswrapper[4922]: I0930 00:16:22.963817 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-utilities" (OuterVolumeSpecName: "utilities") pod "784be6c7-e015-442c-930a-c3112a355ac9" (UID: "784be6c7-e015-442c-930a-c3112a355ac9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:16:22 crc kubenswrapper[4922]: I0930 00:16:22.964678 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/784be6c7-e015-442c-930a-c3112a355ac9-kube-api-access-dkd56" (OuterVolumeSpecName: "kube-api-access-dkd56") pod "784be6c7-e015-442c-930a-c3112a355ac9" (UID: "784be6c7-e015-442c-930a-c3112a355ac9"). InnerVolumeSpecName "kube-api-access-dkd56". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.010667 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "784be6c7-e015-442c-930a-c3112a355ac9" (UID: "784be6c7-e015-442c-930a-c3112a355ac9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.061690 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.061724 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkd56\" (UniqueName: \"kubernetes.io/projected/784be6c7-e015-442c-930a-c3112a355ac9-kube-api-access-dkd56\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.061736 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/784be6c7-e015-442c-930a-c3112a355ac9-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.331337 4922 generic.go:334] "Generic (PLEG): container finished" podID="784be6c7-e015-442c-930a-c3112a355ac9" containerID="9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393" exitCode=0 Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.331382 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bdwc" event={"ID":"784be6c7-e015-442c-930a-c3112a355ac9","Type":"ContainerDied","Data":"9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393"} Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.331427 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8bdwc" event={"ID":"784be6c7-e015-442c-930a-c3112a355ac9","Type":"ContainerDied","Data":"da9638ad9b2e0d4764ad814cecc33efd83aa326299c52c0ca9431bb1033fe08c"} Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.331449 4922 scope.go:117] "RemoveContainer" containerID="9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.331474 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8bdwc" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.364332 4922 scope.go:117] "RemoveContainer" containerID="89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.398450 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8bdwc"] Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.400923 4922 scope.go:117] "RemoveContainer" containerID="fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.424057 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8bdwc"] Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.470526 4922 scope.go:117] "RemoveContainer" containerID="9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393" Sep 30 00:16:23 crc kubenswrapper[4922]: E0930 00:16:23.471141 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393\": container with ID starting with 9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393 not found: ID does not exist" containerID="9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.471197 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393"} err="failed to get container status \"9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393\": rpc error: code = NotFound desc = could not find container \"9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393\": container with ID starting with 9c75d115e20c664859d941964c4a08dd310cf25bc30768c8b9c1e4f9b12bf393 not found: ID does not exist" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.471229 4922 scope.go:117] "RemoveContainer" containerID="89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1" Sep 30 00:16:23 crc kubenswrapper[4922]: E0930 00:16:23.471879 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1\": container with ID starting with 89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1 not found: ID does not exist" containerID="89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.471900 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1"} err="failed to get container status \"89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1\": rpc error: code = NotFound desc = could not find container \"89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1\": container with ID starting with 89552af2cde31ee3d24c0696a9a7c1eb17e5bc92aa31c4a74317fdf5f857c9d1 not found: ID does not exist" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.471914 4922 scope.go:117] "RemoveContainer" containerID="fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b" Sep 30 00:16:23 crc kubenswrapper[4922]: E0930 00:16:23.472158 4922 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b\": container with ID starting with fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b not found: ID does not exist" containerID="fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b" Sep 30 00:16:23 crc kubenswrapper[4922]: I0930 00:16:23.472197 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b"} err="failed to get container status \"fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b\": rpc error: code = NotFound desc = could not find container \"fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b\": container with ID starting with fcdca4934655126753914798640fd1587c9aabedbc0fa2a4c90524412cdde66b not found: ID does not exist" Sep 30 00:16:24 crc kubenswrapper[4922]: I0930 00:16:24.036494 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-0333-account-create-hdnzt"] Sep 30 00:16:24 crc kubenswrapper[4922]: I0930 00:16:24.049303 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-0333-account-create-hdnzt"] Sep 30 00:16:24 crc kubenswrapper[4922]: I0930 00:16:24.442075 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c" path="/var/lib/kubelet/pods/3bdbf3a2-dbcb-430e-a2b5-9dd60e06df2c/volumes" Sep 30 00:16:24 crc kubenswrapper[4922]: I0930 00:16:24.446436 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="784be6c7-e015-442c-930a-c3112a355ac9" path="/var/lib/kubelet/pods/784be6c7-e015-442c-930a-c3112a355ac9/volumes" Sep 30 00:16:29 crc kubenswrapper[4922]: I0930 00:16:29.413208 4922 generic.go:334] "Generic (PLEG): container finished" podID="8471899a-f22f-4f56-b48d-8644475f56ec" containerID="08143fe612bd00b1af5f507cebab09b6c5894e071782c7951ec41e7cbe1faf68" exitCode=0 Sep 30 00:16:29 crc kubenswrapper[4922]: I0930 00:16:29.413341 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" event={"ID":"8471899a-f22f-4f56-b48d-8644475f56ec","Type":"ContainerDied","Data":"08143fe612bd00b1af5f507cebab09b6c5894e071782c7951ec41e7cbe1faf68"} Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.911173 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.975709 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ceph\") pod \"8471899a-f22f-4f56-b48d-8644475f56ec\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.975877 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ssh-key\") pod \"8471899a-f22f-4f56-b48d-8644475f56ec\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.976004 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hnvj\" (UniqueName: \"kubernetes.io/projected/8471899a-f22f-4f56-b48d-8644475f56ec-kube-api-access-9hnvj\") pod \"8471899a-f22f-4f56-b48d-8644475f56ec\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.976043 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-pre-adoption-validation-combined-ca-bundle\") pod \"8471899a-f22f-4f56-b48d-8644475f56ec\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.976137 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-inventory\") pod \"8471899a-f22f-4f56-b48d-8644475f56ec\" (UID: \"8471899a-f22f-4f56-b48d-8644475f56ec\") " Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.982726 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8471899a-f22f-4f56-b48d-8644475f56ec-kube-api-access-9hnvj" (OuterVolumeSpecName: "kube-api-access-9hnvj") pod "8471899a-f22f-4f56-b48d-8644475f56ec" (UID: "8471899a-f22f-4f56-b48d-8644475f56ec"). InnerVolumeSpecName "kube-api-access-9hnvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.983370 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "8471899a-f22f-4f56-b48d-8644475f56ec" (UID: "8471899a-f22f-4f56-b48d-8644475f56ec"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:16:30 crc kubenswrapper[4922]: I0930 00:16:30.989523 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ceph" (OuterVolumeSpecName: "ceph") pod "8471899a-f22f-4f56-b48d-8644475f56ec" (UID: "8471899a-f22f-4f56-b48d-8644475f56ec"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.012725 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8471899a-f22f-4f56-b48d-8644475f56ec" (UID: "8471899a-f22f-4f56-b48d-8644475f56ec"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.027988 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-inventory" (OuterVolumeSpecName: "inventory") pod "8471899a-f22f-4f56-b48d-8644475f56ec" (UID: "8471899a-f22f-4f56-b48d-8644475f56ec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.079292 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hnvj\" (UniqueName: \"kubernetes.io/projected/8471899a-f22f-4f56-b48d-8644475f56ec-kube-api-access-9hnvj\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.079329 4922 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.079361 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.079375 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.079384 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8471899a-f22f-4f56-b48d-8644475f56ec-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.440555 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" event={"ID":"8471899a-f22f-4f56-b48d-8644475f56ec","Type":"ContainerDied","Data":"ffc014baa6af072a78005cd1a0a189282960fb433463b8788be0ace52acdf437"} Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.440599 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffc014baa6af072a78005cd1a0a189282960fb433463b8788be0ace52acdf437" Sep 30 00:16:31 crc kubenswrapper[4922]: I0930 00:16:31.440637 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.549637 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45"] Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550621 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="extract-utilities" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550638 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="extract-utilities" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550665 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90834b09-0fcb-4675-aff1-8de809768ff2" containerName="dnsmasq-dns" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550673 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="90834b09-0fcb-4675-aff1-8de809768ff2" containerName="dnsmasq-dns" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550693 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="extract-content" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550700 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="extract-content" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550717 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="registry-server" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550723 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="registry-server" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550739 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8471899a-f22f-4f56-b48d-8644475f56ec" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550751 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8471899a-f22f-4f56-b48d-8644475f56ec" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550766 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerName="extract-utilities" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550776 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerName="extract-utilities" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550798 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerName="extract-content" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550804 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerName="extract-content" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550814 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerName="registry-server" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550820 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" 
containerName="registry-server" Sep 30 00:16:37 crc kubenswrapper[4922]: E0930 00:16:37.550831 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90834b09-0fcb-4675-aff1-8de809768ff2" containerName="init" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.550837 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="90834b09-0fcb-4675-aff1-8de809768ff2" containerName="init" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.551061 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ed6bdb-32d1-4ea5-bb8f-b2c3131eb2c9" containerName="registry-server" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.551082 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="90834b09-0fcb-4675-aff1-8de809768ff2" containerName="dnsmasq-dns" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.551097 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="784be6c7-e015-442c-930a-c3112a355ac9" containerName="registry-server" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.551109 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8471899a-f22f-4f56-b48d-8644475f56ec" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.551845 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.553792 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.554010 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.554264 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.560858 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.571667 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45"] Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.637270 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.637331 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.637460 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ml8q\" (UniqueName: \"kubernetes.io/projected/8caf595c-95ea-4701-b5e2-97e970cdf01b-kube-api-access-7ml8q\") pod 
\"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.637561 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.637605 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.740732 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ml8q\" (UniqueName: \"kubernetes.io/projected/8caf595c-95ea-4701-b5e2-97e970cdf01b-kube-api-access-7ml8q\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.741065 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.741200 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.741435 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.741527 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.748824 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ceph\") pod 
\"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.749496 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.756529 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.758070 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ml8q\" (UniqueName: \"kubernetes.io/projected/8caf595c-95ea-4701-b5e2-97e970cdf01b-kube-api-access-7ml8q\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.759688 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:37 crc kubenswrapper[4922]: I0930 00:16:37.878939 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:16:38 crc kubenswrapper[4922]: I0930 00:16:38.468582 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45"] Sep 30 00:16:38 crc kubenswrapper[4922]: I0930 00:16:38.530947 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" event={"ID":"8caf595c-95ea-4701-b5e2-97e970cdf01b","Type":"ContainerStarted","Data":"a03be10ab44790217139558379f5e313c14028e5347f580d4a2d9d2abe9b48f1"} Sep 30 00:16:39 crc kubenswrapper[4922]: I0930 00:16:39.545121 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" event={"ID":"8caf595c-95ea-4701-b5e2-97e970cdf01b","Type":"ContainerStarted","Data":"ffcbc168ccdc21cb8e44fbfeed4697f893d5fe49415c7190aecf773ef43b3ffa"} Sep 30 00:16:39 crc kubenswrapper[4922]: I0930 00:16:39.572332 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" podStartSLOduration=2.423890856 podStartE2EDuration="2.572312572s" podCreationTimestamp="2025-09-30 00:16:37 +0000 UTC" firstStartedPulling="2025-09-30 00:16:38.470248746 +0000 UTC m=+6602.780537579" lastFinishedPulling="2025-09-30 00:16:38.618670482 +0000 UTC m=+6602.928959295" observedRunningTime="2025-09-30 00:16:39.563456513 +0000 UTC m=+6603.873745326" watchObservedRunningTime="2025-09-30 00:16:39.572312572 +0000 UTC m=+6603.882601385" Sep 30 00:16:57 crc kubenswrapper[4922]: I0930 00:16:57.058196 4922 scope.go:117] "RemoveContainer" containerID="a52b1c46aa548d742031368e23b2372a0b981271573ed914d972c003ccecb639" Sep 30 00:16:57 crc kubenswrapper[4922]: I0930 00:16:57.083968 4922 scope.go:117] "RemoveContainer" containerID="d3dce7fa9d953efd0f0792ae18ad625b1dae69a7f64b8c560f4517585176a232" Sep 30 00:16:57 crc kubenswrapper[4922]: I0930 00:16:57.138565 4922 scope.go:117] "RemoveContainer" containerID="4bbd87d9b98c36c6c9a16f5094bc99997802869a4a8b8fcd57b9957854e31e47" Sep 30 00:17:05 crc kubenswrapper[4922]: I0930 00:17:05.059778 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-t7ndh"] Sep 30 00:17:05 crc kubenswrapper[4922]: I0930 00:17:05.087802 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-t7ndh"] Sep 30 00:17:06 crc kubenswrapper[4922]: I0930 00:17:06.448772 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06172216-e6e0-41bc-8622-1eeba0c9bc8b" path="/var/lib/kubelet/pods/06172216-e6e0-41bc-8622-1eeba0c9bc8b/volumes" Sep 30 00:17:28 crc kubenswrapper[4922]: I0930 00:17:28.913180 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:17:28 crc kubenswrapper[4922]: I0930 00:17:28.913861 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:17:57 crc kubenswrapper[4922]: I0930 00:17:57.301211 4922 scope.go:117] 
"RemoveContainer" containerID="2b4838cce3350cf25b2394679ecc60099bfb4f31fb0006d1eae043f6a7f1cb11" Sep 30 00:17:57 crc kubenswrapper[4922]: I0930 00:17:57.341257 4922 scope.go:117] "RemoveContainer" containerID="23ee53f48508d70204975c5d97e0e42e2f1fc40d0a63f9486229c5a779ceefcb" Sep 30 00:17:58 crc kubenswrapper[4922]: I0930 00:17:58.912925 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:17:58 crc kubenswrapper[4922]: I0930 00:17:58.913325 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:18:28 crc kubenswrapper[4922]: I0930 00:18:28.913985 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:18:28 crc kubenswrapper[4922]: I0930 00:18:28.914594 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:18:28 crc kubenswrapper[4922]: I0930 00:18:28.914658 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:18:28 crc kubenswrapper[4922]: I0930 00:18:28.915742 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:18:28 crc kubenswrapper[4922]: I0930 00:18:28.915816 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" gracePeriod=600 Sep 30 00:18:29 crc kubenswrapper[4922]: E0930 00:18:29.042683 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:18:29 crc kubenswrapper[4922]: E0930 00:18:29.048507 4922 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod347374f7_ade0_4434_b26d_db474c4413f9.slice/crio-cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed.scope\": RecentStats: unable to find data in memory cache]" Sep 30 00:18:29 crc kubenswrapper[4922]: I0930 00:18:29.797654 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" exitCode=0 Sep 30 00:18:29 crc kubenswrapper[4922]: I0930 00:18:29.797779 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"} Sep 30 00:18:29 crc kubenswrapper[4922]: I0930 00:18:29.798026 4922 scope.go:117] "RemoveContainer" containerID="a6f6de9491a4e56e81d9e945f5a62e49838ba5953cd0f64477adcdc4f24b42ea" Sep 30 00:18:29 crc kubenswrapper[4922]: I0930 00:18:29.798842 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:18:29 crc kubenswrapper[4922]: E0930 00:18:29.799172 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:18:42 crc kubenswrapper[4922]: I0930 00:18:42.993596 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9zmmb"] Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.001022 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.019465 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zmmb"] Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.167006 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-utilities\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.167088 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jp4sc\" (UniqueName: \"kubernetes.io/projected/3264fca8-030f-4f4f-bb5a-01e958ae19ed-kube-api-access-jp4sc\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.167269 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-catalog-content\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.269067 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-catalog-content\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.269231 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-utilities\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.269268 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jp4sc\" (UniqueName: \"kubernetes.io/projected/3264fca8-030f-4f4f-bb5a-01e958ae19ed-kube-api-access-jp4sc\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.269786 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-catalog-content\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.269927 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-utilities\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.287803 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jp4sc\" (UniqueName: \"kubernetes.io/projected/3264fca8-030f-4f4f-bb5a-01e958ae19ed-kube-api-access-jp4sc\") pod \"redhat-marketplace-9zmmb\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.344047 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.421894 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:18:43 crc kubenswrapper[4922]: E0930 00:18:43.422406 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.810149 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zmmb"] Sep 30 00:18:43 crc kubenswrapper[4922]: I0930 00:18:43.980325 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zmmb" event={"ID":"3264fca8-030f-4f4f-bb5a-01e958ae19ed","Type":"ContainerStarted","Data":"12c927909f61e40e73a3811ec520bd821fa97588ab5e4f3df973ad2fc60e89b1"} Sep 30 00:18:44 crc kubenswrapper[4922]: I0930 00:18:44.996339 4922 generic.go:334] "Generic (PLEG): container finished" podID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerID="5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e" exitCode=0 Sep 30 00:18:44 crc kubenswrapper[4922]: I0930 00:18:44.997455 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zmmb" event={"ID":"3264fca8-030f-4f4f-bb5a-01e958ae19ed","Type":"ContainerDied","Data":"5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e"} Sep 30 00:18:46 crc kubenswrapper[4922]: I0930 00:18:46.011401 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zmmb" event={"ID":"3264fca8-030f-4f4f-bb5a-01e958ae19ed","Type":"ContainerStarted","Data":"6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce"} Sep 30 00:18:47 crc kubenswrapper[4922]: I0930 00:18:47.021039 4922 generic.go:334] "Generic (PLEG): container finished" podID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerID="6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce" exitCode=0 Sep 30 00:18:47 crc kubenswrapper[4922]: I0930 00:18:47.022249 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zmmb" event={"ID":"3264fca8-030f-4f4f-bb5a-01e958ae19ed","Type":"ContainerDied","Data":"6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce"} Sep 30 00:18:48 crc kubenswrapper[4922]: I0930 00:18:48.039559 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zmmb" event={"ID":"3264fca8-030f-4f4f-bb5a-01e958ae19ed","Type":"ContainerStarted","Data":"bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2"} Sep 30 00:18:48 crc kubenswrapper[4922]: I0930 00:18:48.059936 4922 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-9zmmb" podStartSLOduration=3.637245811 podStartE2EDuration="6.059918121s" podCreationTimestamp="2025-09-30 00:18:42 +0000 UTC" firstStartedPulling="2025-09-30 00:18:45.003792401 +0000 UTC m=+6729.314081224" lastFinishedPulling="2025-09-30 00:18:47.426464721 +0000 UTC m=+6731.736753534" observedRunningTime="2025-09-30 00:18:48.055163193 +0000 UTC m=+6732.365452006" watchObservedRunningTime="2025-09-30 00:18:48.059918121 +0000 UTC m=+6732.370206924" Sep 30 00:18:53 crc kubenswrapper[4922]: I0930 00:18:53.345120 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:53 crc kubenswrapper[4922]: I0930 00:18:53.345838 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:53 crc kubenswrapper[4922]: I0930 00:18:53.396714 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:54 crc kubenswrapper[4922]: I0930 00:18:54.165106 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:54 crc kubenswrapper[4922]: I0930 00:18:54.229006 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zmmb"] Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.125774 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9zmmb" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="registry-server" containerID="cri-o://bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2" gracePeriod=2 Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.784804 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.893578 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jp4sc\" (UniqueName: \"kubernetes.io/projected/3264fca8-030f-4f4f-bb5a-01e958ae19ed-kube-api-access-jp4sc\") pod \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.893648 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-catalog-content\") pod \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.893689 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-utilities\") pod \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\" (UID: \"3264fca8-030f-4f4f-bb5a-01e958ae19ed\") " Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.895024 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-utilities" (OuterVolumeSpecName: "utilities") pod "3264fca8-030f-4f4f-bb5a-01e958ae19ed" (UID: "3264fca8-030f-4f4f-bb5a-01e958ae19ed"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.905800 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3264fca8-030f-4f4f-bb5a-01e958ae19ed-kube-api-access-jp4sc" (OuterVolumeSpecName: "kube-api-access-jp4sc") pod "3264fca8-030f-4f4f-bb5a-01e958ae19ed" (UID: "3264fca8-030f-4f4f-bb5a-01e958ae19ed"). InnerVolumeSpecName "kube-api-access-jp4sc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.946136 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3264fca8-030f-4f4f-bb5a-01e958ae19ed" (UID: "3264fca8-030f-4f4f-bb5a-01e958ae19ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.995768 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jp4sc\" (UniqueName: \"kubernetes.io/projected/3264fca8-030f-4f4f-bb5a-01e958ae19ed-kube-api-access-jp4sc\") on node \"crc\" DevicePath \"\"" Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.995806 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:18:56 crc kubenswrapper[4922]: I0930 00:18:56.995815 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3264fca8-030f-4f4f-bb5a-01e958ae19ed-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.138976 4922 generic.go:334] "Generic (PLEG): container finished" podID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerID="bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2" exitCode=0 Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.139025 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9zmmb" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.140187 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zmmb" event={"ID":"3264fca8-030f-4f4f-bb5a-01e958ae19ed","Type":"ContainerDied","Data":"bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2"} Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.140362 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9zmmb" event={"ID":"3264fca8-030f-4f4f-bb5a-01e958ae19ed","Type":"ContainerDied","Data":"12c927909f61e40e73a3811ec520bd821fa97588ab5e4f3df973ad2fc60e89b1"} Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.140662 4922 scope.go:117] "RemoveContainer" containerID="bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.187085 4922 scope.go:117] "RemoveContainer" containerID="6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.187803 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zmmb"] Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.200435 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9zmmb"] Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.239914 4922 scope.go:117] "RemoveContainer" containerID="5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.265514 4922 scope.go:117] "RemoveContainer" containerID="bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2" Sep 30 00:18:57 crc kubenswrapper[4922]: E0930 00:18:57.265946 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2\": container with ID starting with bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2 not found: ID does not exist" containerID="bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.266011 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2"} err="failed to get container status \"bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2\": rpc error: code = NotFound desc = could not find container \"bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2\": container with ID starting with bd6e7b5c16a5ecde68cd7bd3837092d52509d27cfb833227db64b857df1338f2 not found: ID does not exist" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.266056 4922 scope.go:117] "RemoveContainer" containerID="6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce" Sep 30 00:18:57 crc kubenswrapper[4922]: E0930 00:18:57.266641 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce\": container with ID starting with 6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce not found: ID does not exist" containerID="6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce" Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.266707 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce"} err="failed to get container status \"6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce\": rpc error: code = NotFound desc = could not find container \"6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce\": container with ID starting with 6a9c7aa54b5f21b50cdc878d65d6b9d39c8203f329fdff5f51489bb7b9f815ce not found: ID does not exist"
Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.266786 4922 scope.go:117] "RemoveContainer" containerID="5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e"
Sep 30 00:18:57 crc kubenswrapper[4922]: E0930 00:18:57.267204 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e\": container with ID starting with 5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e not found: ID does not exist" containerID="5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e"
Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.267264 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e"} err="failed to get container status \"5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e\": rpc error: code = NotFound desc = could not find container \"5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e\": container with ID starting with 5338e70e2159541426cf41c8b18ed278027d52b63471a8b01b924bcf71f7b59e not found: ID does not exist"
Sep 30 00:18:57 crc kubenswrapper[4922]: I0930 00:18:57.422465 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"
Sep 30 00:18:57 crc kubenswrapper[4922]: E0930 00:18:57.422856 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9"
Sep 30 00:18:58 crc kubenswrapper[4922]: I0930 00:18:58.439130 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" path="/var/lib/kubelet/pods/3264fca8-030f-4f4f-bb5a-01e958ae19ed/volumes"
Sep 30 00:19:10 crc kubenswrapper[4922]: I0930 00:19:10.421589 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"
Sep 30 00:19:10 crc kubenswrapper[4922]: E0930 00:19:10.422458 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9"
Sep 30 00:19:24 crc kubenswrapper[4922]: I0930 00:19:24.422996 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"
Sep 30 00:19:24 crc kubenswrapper[4922]: E0930 00:19:24.424510 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9"
Sep 30 00:19:35 crc kubenswrapper[4922]: I0930 00:19:35.421787 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"
Sep 30 00:19:35 crc kubenswrapper[4922]: E0930 00:19:35.422717 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9"
Sep 30 00:19:48 crc kubenswrapper[4922]: I0930 00:19:48.424966 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"
Sep 30 00:19:48 crc kubenswrapper[4922]: E0930 00:19:48.425881 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9"
Sep 30 00:19:51 crc kubenswrapper[4922]: I0930 00:19:51.042853 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-nwltr"]
Sep 30 00:19:51 crc kubenswrapper[4922]: I0930 00:19:51.058802 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-nwltr"]
Sep 30 00:19:52 crc kubenswrapper[4922]: I0930 00:19:52.437450 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb45e14c-f6a0-4fc6-b74d-c3f44cda04df" path="/var/lib/kubelet/pods/cb45e14c-f6a0-4fc6-b74d-c3f44cda04df/volumes"
Sep 30 00:19:57 crc kubenswrapper[4922]: I0930 00:19:57.498292 4922 scope.go:117] "RemoveContainer" containerID="751be494e5d3b82cb56fc8c03584f37c3f1eca6ca4779eb4920f9779c20291a9"
Sep 30 00:20:02 crc kubenswrapper[4922]: I0930 00:20:02.045618 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-bbea-account-create-t7zn6"]
Sep 30 00:20:02 crc kubenswrapper[4922]: I0930 00:20:02.057504 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-bbea-account-create-t7zn6"]
Sep 30 00:20:02 crc kubenswrapper[4922]: I0930 00:20:02.422515 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed"
Sep 30 00:20:02 crc kubenswrapper[4922]: E0930 00:20:02.423475 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm"
podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:20:02 crc kubenswrapper[4922]: I0930 00:20:02.442500 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c61328e6-b9b4-420b-a24c-ea06d50fbcc6" path="/var/lib/kubelet/pods/c61328e6-b9b4-420b-a24c-ea06d50fbcc6/volumes" Sep 30 00:20:13 crc kubenswrapper[4922]: I0930 00:20:13.422874 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:20:13 crc kubenswrapper[4922]: E0930 00:20:13.423839 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:20:18 crc kubenswrapper[4922]: I0930 00:20:18.059811 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-qfs45"] Sep 30 00:20:18 crc kubenswrapper[4922]: I0930 00:20:18.073239 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-qfs45"] Sep 30 00:20:18 crc kubenswrapper[4922]: I0930 00:20:18.441142 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d" path="/var/lib/kubelet/pods/6724fafe-3e0a-4ffc-afc9-ea8cd29ebf7d/volumes" Sep 30 00:20:24 crc kubenswrapper[4922]: I0930 00:20:24.422427 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:20:24 crc kubenswrapper[4922]: E0930 00:20:24.423093 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:20:35 crc kubenswrapper[4922]: I0930 00:20:35.422290 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:20:35 crc kubenswrapper[4922]: E0930 00:20:35.423549 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:20:50 crc kubenswrapper[4922]: I0930 00:20:50.421857 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:20:50 crc kubenswrapper[4922]: E0930 00:20:50.422930 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 
00:20:57 crc kubenswrapper[4922]: I0930 00:20:57.562789 4922 scope.go:117] "RemoveContainer" containerID="145bb6bcc6e74bfcdeda5e53dfbdebd065c800a41bd45f9ec975c553998e1977" Sep 30 00:20:57 crc kubenswrapper[4922]: I0930 00:20:57.600676 4922 scope.go:117] "RemoveContainer" containerID="15628ca873c68d3ef55f35a47c181ddf28c20d1f8703afdd45801b78bbd60d2c" Sep 30 00:21:02 crc kubenswrapper[4922]: I0930 00:21:02.421653 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:21:02 crc kubenswrapper[4922]: E0930 00:21:02.422498 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:21:16 crc kubenswrapper[4922]: I0930 00:21:16.436418 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:21:16 crc kubenswrapper[4922]: E0930 00:21:16.437850 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:21:29 crc kubenswrapper[4922]: I0930 00:21:29.423977 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:21:29 crc kubenswrapper[4922]: E0930 00:21:29.425112 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:21:42 crc kubenswrapper[4922]: I0930 00:21:42.422179 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:21:42 crc kubenswrapper[4922]: E0930 00:21:42.422988 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:21:55 crc kubenswrapper[4922]: I0930 00:21:55.421809 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:21:55 crc kubenswrapper[4922]: E0930 00:21:55.422882 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:21:57 crc kubenswrapper[4922]: I0930 00:21:57.717111 4922 scope.go:117] "RemoveContainer" containerID="6b9ce2feda0943496f715a20b573043fe3cb201758c0e162f1d08da5a7053b43" Sep 30 00:21:57 crc kubenswrapper[4922]: I0930 00:21:57.755069 4922 scope.go:117] "RemoveContainer" containerID="55d4ee68645dd0eeda0f32651ba2b20b6e93de6e63c5649dca51cece70235075" Sep 30 00:22:10 crc kubenswrapper[4922]: I0930 00:22:10.422470 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:22:10 crc kubenswrapper[4922]: E0930 00:22:10.423292 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:22:21 crc kubenswrapper[4922]: I0930 00:22:21.428792 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:22:21 crc kubenswrapper[4922]: E0930 00:22:21.429448 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:22:33 crc kubenswrapper[4922]: I0930 00:22:33.423779 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:22:33 crc kubenswrapper[4922]: E0930 00:22:33.426204 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:22:48 crc kubenswrapper[4922]: I0930 00:22:48.422954 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:22:48 crc kubenswrapper[4922]: E0930 00:22:48.424107 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:22:57 crc kubenswrapper[4922]: I0930 00:22:57.834543 4922 scope.go:117] "RemoveContainer" containerID="ab8f0ae113a99df4623b08f983b1820c75d88368f5b28b3d4ce686297112b4e4" Sep 30 00:22:59 crc kubenswrapper[4922]: I0930 00:22:59.422291 4922 scope.go:117] "RemoveContainer" 
containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:22:59 crc kubenswrapper[4922]: E0930 00:22:59.422908 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:23:14 crc kubenswrapper[4922]: I0930 00:23:14.421605 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:23:14 crc kubenswrapper[4922]: E0930 00:23:14.422440 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:23:26 crc kubenswrapper[4922]: I0930 00:23:26.052982 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-5fdvz"] Sep 30 00:23:26 crc kubenswrapper[4922]: I0930 00:23:26.079175 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-5fdvz"] Sep 30 00:23:26 crc kubenswrapper[4922]: I0930 00:23:26.444224 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbc9a231-7e7e-43bd-ad0c-66a5642601c5" path="/var/lib/kubelet/pods/bbc9a231-7e7e-43bd-ad0c-66a5642601c5/volumes" Sep 30 00:23:29 crc kubenswrapper[4922]: I0930 00:23:29.422667 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:23:30 crc kubenswrapper[4922]: I0930 00:23:30.441955 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"1322df6e2f654d152f0278d46a5ceecb4cd172fb6f73a71644be5c0a0d23a702"} Sep 30 00:23:36 crc kubenswrapper[4922]: I0930 00:23:36.043717 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-e99a-account-create-w5sqs"] Sep 30 00:23:36 crc kubenswrapper[4922]: I0930 00:23:36.052429 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-e99a-account-create-w5sqs"] Sep 30 00:23:36 crc kubenswrapper[4922]: I0930 00:23:36.446475 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06fe4f66-4cfc-4807-a952-ceb025f473c3" path="/var/lib/kubelet/pods/06fe4f66-4cfc-4807-a952-ceb025f473c3/volumes" Sep 30 00:23:48 crc kubenswrapper[4922]: I0930 00:23:48.061727 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-zf7ks"] Sep 30 00:23:48 crc kubenswrapper[4922]: I0930 00:23:48.072854 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-zf7ks"] Sep 30 00:23:48 crc kubenswrapper[4922]: I0930 00:23:48.445110 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="215855a2-d48f-4456-bbd2-4bc1bc6e260a" path="/var/lib/kubelet/pods/215855a2-d48f-4456-bbd2-4bc1bc6e260a/volumes" Sep 30 00:23:57 crc kubenswrapper[4922]: I0930 00:23:57.927696 4922 scope.go:117] 
"RemoveContainer" containerID="f40caf44cde83805e7d5d16e343d01a16c0f7627bd698b3b85610bd6d4942095" Sep 30 00:23:57 crc kubenswrapper[4922]: I0930 00:23:57.993420 4922 scope.go:117] "RemoveContainer" containerID="4778ae2cf51787f6e2f25e80b26e47f42f43ce5f8a21602c6650d5f2a338f577" Sep 30 00:23:58 crc kubenswrapper[4922]: I0930 00:23:58.061815 4922 scope.go:117] "RemoveContainer" containerID="785ae5af3d03fc458b06c8ebaa5f7754922c57882cd668430bb20a0798086bed" Sep 30 00:24:09 crc kubenswrapper[4922]: I0930 00:24:09.048637 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-rmh6j"] Sep 30 00:24:09 crc kubenswrapper[4922]: I0930 00:24:09.056897 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-rmh6j"] Sep 30 00:24:10 crc kubenswrapper[4922]: I0930 00:24:10.448993 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ec28608-d912-4c45-b98b-3eb6bb4d4489" path="/var/lib/kubelet/pods/3ec28608-d912-4c45-b98b-3eb6bb4d4489/volumes" Sep 30 00:24:19 crc kubenswrapper[4922]: I0930 00:24:19.042280 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-a833-account-create-tqf75"] Sep 30 00:24:19 crc kubenswrapper[4922]: I0930 00:24:19.052235 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-a833-account-create-tqf75"] Sep 30 00:24:20 crc kubenswrapper[4922]: I0930 00:24:20.439085 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c579a7b6-dcc5-40e0-8661-d71093ffc017" path="/var/lib/kubelet/pods/c579a7b6-dcc5-40e0-8661-d71093ffc017/volumes" Sep 30 00:24:31 crc kubenswrapper[4922]: I0930 00:24:31.057526 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-7njnn"] Sep 30 00:24:31 crc kubenswrapper[4922]: I0930 00:24:31.074944 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-7njnn"] Sep 30 00:24:32 crc kubenswrapper[4922]: I0930 00:24:32.433329 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bae59364-688d-42c9-9dad-6d8702b79983" path="/var/lib/kubelet/pods/bae59364-688d-42c9-9dad-6d8702b79983/volumes" Sep 30 00:24:58 crc kubenswrapper[4922]: I0930 00:24:58.209077 4922 scope.go:117] "RemoveContainer" containerID="efd4f2910aa83ecebae441b42237ffa1bd99d9aa4f5a881a6ea1e45083ec288b" Sep 30 00:24:58 crc kubenswrapper[4922]: I0930 00:24:58.232322 4922 scope.go:117] "RemoveContainer" containerID="2da2f8eeb36ee3b8a406020fbee9252b30e0a89b09702d047c91da8ae89875d0" Sep 30 00:24:58 crc kubenswrapper[4922]: I0930 00:24:58.284042 4922 scope.go:117] "RemoveContainer" containerID="8ab21c9a7dfc16e619a8df6b7cd381af299ecef44cb93b804ff4e9fc9da67e62" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.495277 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cjdns"] Sep 30 00:25:52 crc kubenswrapper[4922]: E0930 00:25:52.496583 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="extract-utilities" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.496608 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="extract-utilities" Sep 30 00:25:52 crc kubenswrapper[4922]: E0930 00:25:52.496646 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="extract-content" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.496658 4922 
state_mem.go:107] "Deleted CPUSet assignment" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="extract-content" Sep 30 00:25:52 crc kubenswrapper[4922]: E0930 00:25:52.496683 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="registry-server" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.496694 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="registry-server" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.503026 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3264fca8-030f-4f4f-bb5a-01e958ae19ed" containerName="registry-server" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.505739 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.511046 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjdns"] Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.597975 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-utilities\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.598037 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-catalog-content\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.598182 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8fjq\" (UniqueName: \"kubernetes.io/projected/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-kube-api-access-f8fjq\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.699809 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-utilities\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.699875 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-catalog-content\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.699990 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8fjq\" (UniqueName: \"kubernetes.io/projected/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-kube-api-access-f8fjq\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.700317 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-utilities\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.700633 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-catalog-content\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.725693 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8fjq\" (UniqueName: \"kubernetes.io/projected/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-kube-api-access-f8fjq\") pod \"redhat-operators-cjdns\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:52 crc kubenswrapper[4922]: I0930 00:25:52.842871 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:25:53 crc kubenswrapper[4922]: I0930 00:25:53.396468 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cjdns"] Sep 30 00:25:54 crc kubenswrapper[4922]: I0930 00:25:54.242553 4922 generic.go:334] "Generic (PLEG): container finished" podID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerID="9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c" exitCode=0 Sep 30 00:25:54 crc kubenswrapper[4922]: I0930 00:25:54.242725 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjdns" event={"ID":"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966","Type":"ContainerDied","Data":"9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c"} Sep 30 00:25:54 crc kubenswrapper[4922]: I0930 00:25:54.243033 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjdns" event={"ID":"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966","Type":"ContainerStarted","Data":"37095417f0d06ca5827391380fbb59f038a7dcc0f2710b5350afdda8b146140d"} Sep 30 00:25:54 crc kubenswrapper[4922]: I0930 00:25:54.245138 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:25:56 crc kubenswrapper[4922]: I0930 00:25:56.290150 4922 generic.go:334] "Generic (PLEG): container finished" podID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerID="c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef" exitCode=0 Sep 30 00:25:56 crc kubenswrapper[4922]: I0930 00:25:56.290208 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjdns" event={"ID":"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966","Type":"ContainerDied","Data":"c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef"} Sep 30 00:25:58 crc kubenswrapper[4922]: I0930 00:25:58.314901 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjdns" event={"ID":"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966","Type":"ContainerStarted","Data":"060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c"} Sep 30 00:25:58 crc kubenswrapper[4922]: I0930 00:25:58.344953 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-cjdns" podStartSLOduration=3.6597271989999998 podStartE2EDuration="6.344930935s" podCreationTimestamp="2025-09-30 00:25:52 +0000 UTC" firstStartedPulling="2025-09-30 00:25:54.244853355 +0000 UTC m=+7158.555142168" lastFinishedPulling="2025-09-30 00:25:56.930057091 +0000 UTC m=+7161.240345904" observedRunningTime="2025-09-30 00:25:58.335985934 +0000 UTC m=+7162.646274777" watchObservedRunningTime="2025-09-30 00:25:58.344930935 +0000 UTC m=+7162.655219748" Sep 30 00:25:58 crc kubenswrapper[4922]: I0930 00:25:58.912621 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:25:58 crc kubenswrapper[4922]: I0930 00:25:58.912687 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:26:02 crc kubenswrapper[4922]: I0930 00:26:02.843882 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:26:02 crc kubenswrapper[4922]: I0930 00:26:02.844785 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:26:02 crc kubenswrapper[4922]: I0930 00:26:02.931658 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:26:03 crc kubenswrapper[4922]: I0930 00:26:03.444752 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:26:03 crc kubenswrapper[4922]: I0930 00:26:03.517890 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjdns"] Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.389179 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cjdns" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="registry-server" containerID="cri-o://060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c" gracePeriod=2 Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.930456 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.989831 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8fjq\" (UniqueName: \"kubernetes.io/projected/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-kube-api-access-f8fjq\") pod \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.990665 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-utilities\") pod \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.991216 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-catalog-content\") pod \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\" (UID: \"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966\") " Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.991316 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-utilities" (OuterVolumeSpecName: "utilities") pod "4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" (UID: "4bbe9c5a-22f6-4fa2-9ab4-16388d83c966"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.992603 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:26:05 crc kubenswrapper[4922]: I0930 00:26:05.999647 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-kube-api-access-f8fjq" (OuterVolumeSpecName: "kube-api-access-f8fjq") pod "4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" (UID: "4bbe9c5a-22f6-4fa2-9ab4-16388d83c966"). InnerVolumeSpecName "kube-api-access-f8fjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.085952 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" (UID: "4bbe9c5a-22f6-4fa2-9ab4-16388d83c966"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.095549 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8fjq\" (UniqueName: \"kubernetes.io/projected/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-kube-api-access-f8fjq\") on node \"crc\" DevicePath \"\"" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.095606 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.402785 4922 generic.go:334] "Generic (PLEG): container finished" podID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerID="060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c" exitCode=0 Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.402860 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjdns" event={"ID":"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966","Type":"ContainerDied","Data":"060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c"} Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.402867 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cjdns" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.403157 4922 scope.go:117] "RemoveContainer" containerID="060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.403142 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cjdns" event={"ID":"4bbe9c5a-22f6-4fa2-9ab4-16388d83c966","Type":"ContainerDied","Data":"37095417f0d06ca5827391380fbb59f038a7dcc0f2710b5350afdda8b146140d"} Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.438685 4922 scope.go:117] "RemoveContainer" containerID="c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.481343 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cjdns"] Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.483124 4922 scope.go:117] "RemoveContainer" containerID="9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.494438 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cjdns"] Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.549251 4922 scope.go:117] "RemoveContainer" containerID="060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c" Sep 30 00:26:06 crc kubenswrapper[4922]: E0930 00:26:06.550059 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c\": container with ID starting with 060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c not found: ID does not exist" containerID="060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.550121 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c"} err="failed to get container status \"060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c\": 
rpc error: code = NotFound desc = could not find container \"060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c\": container with ID starting with 060d0a9c6e9ba93fb2b42c8fc81dd204c09f46f826a234770660e9576e99724c not found: ID does not exist" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.550148 4922 scope.go:117] "RemoveContainer" containerID="c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef" Sep 30 00:26:06 crc kubenswrapper[4922]: E0930 00:26:06.550557 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef\": container with ID starting with c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef not found: ID does not exist" containerID="c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.550597 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef"} err="failed to get container status \"c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef\": rpc error: code = NotFound desc = could not find container \"c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef\": container with ID starting with c5e7329a448aaaefc6fe3f1170b078dec85c7c581be36b16a3c73de37c0f53ef not found: ID does not exist" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.550621 4922 scope.go:117] "RemoveContainer" containerID="9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c" Sep 30 00:26:06 crc kubenswrapper[4922]: E0930 00:26:06.550965 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c\": container with ID starting with 9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c not found: ID does not exist" containerID="9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c" Sep 30 00:26:06 crc kubenswrapper[4922]: I0930 00:26:06.551006 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c"} err="failed to get container status \"9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c\": rpc error: code = NotFound desc = could not find container \"9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c\": container with ID starting with 9270d6df422d85d2ed9d02e5e71a8b11441186a36815102940387b00a9e55b4c not found: ID does not exist" Sep 30 00:26:08 crc kubenswrapper[4922]: I0930 00:26:08.440629 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" path="/var/lib/kubelet/pods/4bbe9c5a-22f6-4fa2-9ab4-16388d83c966/volumes" Sep 30 00:26:28 crc kubenswrapper[4922]: I0930 00:26:28.913015 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:26:28 crc kubenswrapper[4922]: I0930 00:26:28.913706 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:26:38 crc kubenswrapper[4922]: I0930 00:26:38.822831 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mdgfh"] Sep 30 00:26:38 crc kubenswrapper[4922]: E0930 00:26:38.824119 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="registry-server" Sep 30 00:26:38 crc kubenswrapper[4922]: I0930 00:26:38.824141 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="registry-server" Sep 30 00:26:38 crc kubenswrapper[4922]: E0930 00:26:38.824192 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="extract-utilities" Sep 30 00:26:38 crc kubenswrapper[4922]: I0930 00:26:38.824203 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="extract-utilities" Sep 30 00:26:38 crc kubenswrapper[4922]: E0930 00:26:38.824241 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="extract-content" Sep 30 00:26:38 crc kubenswrapper[4922]: I0930 00:26:38.824253 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="extract-content" Sep 30 00:26:38 crc kubenswrapper[4922]: I0930 00:26:38.824642 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bbe9c5a-22f6-4fa2-9ab4-16388d83c966" containerName="registry-server" Sep 30 00:26:38 crc kubenswrapper[4922]: I0930 00:26:38.829509 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:38 crc kubenswrapper[4922]: I0930 00:26:38.850509 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mdgfh"] Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.018718 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-utilities\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.018777 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-catalog-content\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.018808 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7qrs\" (UniqueName: \"kubernetes.io/projected/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-kube-api-access-r7qrs\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.121798 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-utilities\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.121869 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-catalog-content\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.121914 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7qrs\" (UniqueName: \"kubernetes.io/projected/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-kube-api-access-r7qrs\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.122712 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-catalog-content\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.122750 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-utilities\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.144777 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r7qrs\" (UniqueName: \"kubernetes.io/projected/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-kube-api-access-r7qrs\") pod \"certified-operators-mdgfh\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.161030 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.677850 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mdgfh"] Sep 30 00:26:39 crc kubenswrapper[4922]: I0930 00:26:39.817368 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mdgfh" event={"ID":"d2095e5b-567d-45a7-98fc-98e4ba66a3b2","Type":"ContainerStarted","Data":"e79fd3ac02a40ea8307bd9b5eec4714cff373deb6877ffac9ae3a7b9d89c015d"} Sep 30 00:26:40 crc kubenswrapper[4922]: I0930 00:26:40.829943 4922 generic.go:334] "Generic (PLEG): container finished" podID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerID="3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d" exitCode=0 Sep 30 00:26:40 crc kubenswrapper[4922]: I0930 00:26:40.830056 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mdgfh" event={"ID":"d2095e5b-567d-45a7-98fc-98e4ba66a3b2","Type":"ContainerDied","Data":"3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d"} Sep 30 00:26:42 crc kubenswrapper[4922]: I0930 00:26:42.861966 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mdgfh" event={"ID":"d2095e5b-567d-45a7-98fc-98e4ba66a3b2","Type":"ContainerStarted","Data":"13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396"} Sep 30 00:26:43 crc kubenswrapper[4922]: I0930 00:26:43.882130 4922 generic.go:334] "Generic (PLEG): container finished" podID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerID="13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396" exitCode=0 Sep 30 00:26:43 crc kubenswrapper[4922]: I0930 00:26:43.882357 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mdgfh" event={"ID":"d2095e5b-567d-45a7-98fc-98e4ba66a3b2","Type":"ContainerDied","Data":"13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396"} Sep 30 00:26:44 crc kubenswrapper[4922]: I0930 00:26:44.895118 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mdgfh" event={"ID":"d2095e5b-567d-45a7-98fc-98e4ba66a3b2","Type":"ContainerStarted","Data":"6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc"} Sep 30 00:26:44 crc kubenswrapper[4922]: I0930 00:26:44.919346 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mdgfh" podStartSLOduration=3.303799955 podStartE2EDuration="6.919330205s" podCreationTimestamp="2025-09-30 00:26:38 +0000 UTC" firstStartedPulling="2025-09-30 00:26:40.832130575 +0000 UTC m=+7205.142419418" lastFinishedPulling="2025-09-30 00:26:44.447660825 +0000 UTC m=+7208.757949668" observedRunningTime="2025-09-30 00:26:44.91831391 +0000 UTC m=+7209.228602723" watchObservedRunningTime="2025-09-30 00:26:44.919330205 +0000 UTC m=+7209.229619018" Sep 30 00:26:49 crc kubenswrapper[4922]: I0930 00:26:49.162190 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:49 crc kubenswrapper[4922]: I0930 00:26:49.162848 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:49 crc kubenswrapper[4922]: I0930 00:26:49.273588 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:50 crc kubenswrapper[4922]: I0930 00:26:50.048422 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:50 crc kubenswrapper[4922]: I0930 00:26:50.114673 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mdgfh"] Sep 30 00:26:51 crc kubenswrapper[4922]: I0930 00:26:51.997512 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mdgfh" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="registry-server" containerID="cri-o://6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc" gracePeriod=2 Sep 30 00:26:52 crc kubenswrapper[4922]: I0930 00:26:52.831344 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.002876 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-catalog-content\") pod \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.004822 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7qrs\" (UniqueName: \"kubernetes.io/projected/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-kube-api-access-r7qrs\") pod \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.004959 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-utilities\") pod \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\" (UID: \"d2095e5b-567d-45a7-98fc-98e4ba66a3b2\") " Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.010113 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-utilities" (OuterVolumeSpecName: "utilities") pod "d2095e5b-567d-45a7-98fc-98e4ba66a3b2" (UID: "d2095e5b-567d-45a7-98fc-98e4ba66a3b2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.041975 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-kube-api-access-r7qrs" (OuterVolumeSpecName: "kube-api-access-r7qrs") pod "d2095e5b-567d-45a7-98fc-98e4ba66a3b2" (UID: "d2095e5b-567d-45a7-98fc-98e4ba66a3b2"). InnerVolumeSpecName "kube-api-access-r7qrs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.091423 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2095e5b-567d-45a7-98fc-98e4ba66a3b2" (UID: "d2095e5b-567d-45a7-98fc-98e4ba66a3b2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.097958 4922 generic.go:334] "Generic (PLEG): container finished" podID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerID="6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc" exitCode=0 Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.098002 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mdgfh" event={"ID":"d2095e5b-567d-45a7-98fc-98e4ba66a3b2","Type":"ContainerDied","Data":"6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc"} Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.098029 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mdgfh" event={"ID":"d2095e5b-567d-45a7-98fc-98e4ba66a3b2","Type":"ContainerDied","Data":"e79fd3ac02a40ea8307bd9b5eec4714cff373deb6877ffac9ae3a7b9d89c015d"} Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.098046 4922 scope.go:117] "RemoveContainer" containerID="6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.098207 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mdgfh" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.109125 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.109151 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.109162 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7qrs\" (UniqueName: \"kubernetes.io/projected/d2095e5b-567d-45a7-98fc-98e4ba66a3b2-kube-api-access-r7qrs\") on node \"crc\" DevicePath \"\"" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.122320 4922 scope.go:117] "RemoveContainer" containerID="13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.140836 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mdgfh"] Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.149135 4922 scope.go:117] "RemoveContainer" containerID="3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.153270 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mdgfh"] Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.190127 4922 scope.go:117] "RemoveContainer" containerID="6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc" Sep 30 00:26:53 crc kubenswrapper[4922]: E0930 00:26:53.190594 4922 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc\": container with ID starting with 6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc not found: ID does not exist" containerID="6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.190735 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc"} err="failed to get container status \"6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc\": rpc error: code = NotFound desc = could not find container \"6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc\": container with ID starting with 6756925aa738f07291c4e2e91146fd59c524601f06dd5ead8e934604757d4fbc not found: ID does not exist" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.190770 4922 scope.go:117] "RemoveContainer" containerID="13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396" Sep 30 00:26:53 crc kubenswrapper[4922]: E0930 00:26:53.191140 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396\": container with ID starting with 13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396 not found: ID does not exist" containerID="13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.191183 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396"} err="failed to get container status \"13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396\": rpc error: code = NotFound desc = could not find container \"13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396\": container with ID starting with 13d0b0f681fdea693539d029c350ab7235c85292798cc3da7f59e740a0279396 not found: ID does not exist" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.191316 4922 scope.go:117] "RemoveContainer" containerID="3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d" Sep 30 00:26:53 crc kubenswrapper[4922]: E0930 00:26:53.191888 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d\": container with ID starting with 3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d not found: ID does not exist" containerID="3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d" Sep 30 00:26:53 crc kubenswrapper[4922]: I0930 00:26:53.191940 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d"} err="failed to get container status \"3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d\": rpc error: code = NotFound desc = could not find container \"3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d\": container with ID starting with 3850c78cf66dc9570b29eb5d048e54e2d413bf2a7ef9e33ffcc2d050bedef74d not found: ID does not exist" Sep 30 00:26:54 crc kubenswrapper[4922]: I0930 00:26:54.435557 4922 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" path="/var/lib/kubelet/pods/d2095e5b-567d-45a7-98fc-98e4ba66a3b2/volumes" Sep 30 00:26:58 crc kubenswrapper[4922]: I0930 00:26:58.913433 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:26:58 crc kubenswrapper[4922]: I0930 00:26:58.914296 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:26:58 crc kubenswrapper[4922]: I0930 00:26:58.914558 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:26:58 crc kubenswrapper[4922]: I0930 00:26:58.915783 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1322df6e2f654d152f0278d46a5ceecb4cd172fb6f73a71644be5c0a0d23a702"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:26:58 crc kubenswrapper[4922]: I0930 00:26:58.915876 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://1322df6e2f654d152f0278d46a5ceecb4cd172fb6f73a71644be5c0a0d23a702" gracePeriod=600 Sep 30 00:26:59 crc kubenswrapper[4922]: I0930 00:26:59.177235 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="1322df6e2f654d152f0278d46a5ceecb4cd172fb6f73a71644be5c0a0d23a702" exitCode=0 Sep 30 00:26:59 crc kubenswrapper[4922]: I0930 00:26:59.177300 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"1322df6e2f654d152f0278d46a5ceecb4cd172fb6f73a71644be5c0a0d23a702"} Sep 30 00:26:59 crc kubenswrapper[4922]: I0930 00:26:59.177355 4922 scope.go:117] "RemoveContainer" containerID="cb79f7e11034cf5a109bf5f3098105d71f6bcb7b5f2b2ff92f4714a15a64daed" Sep 30 00:27:00 crc kubenswrapper[4922]: I0930 00:27:00.192755 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6"} Sep 30 00:27:17 crc kubenswrapper[4922]: I0930 00:27:17.388008 4922 generic.go:334] "Generic (PLEG): container finished" podID="8caf595c-95ea-4701-b5e2-97e970cdf01b" containerID="ffcbc168ccdc21cb8e44fbfeed4697f893d5fe49415c7190aecf773ef43b3ffa" exitCode=0 Sep 30 00:27:17 crc kubenswrapper[4922]: I0930 00:27:17.388279 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" 
event={"ID":"8caf595c-95ea-4701-b5e2-97e970cdf01b","Type":"ContainerDied","Data":"ffcbc168ccdc21cb8e44fbfeed4697f893d5fe49415c7190aecf773ef43b3ffa"} Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.874154 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.934266 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ml8q\" (UniqueName: \"kubernetes.io/projected/8caf595c-95ea-4701-b5e2-97e970cdf01b-kube-api-access-7ml8q\") pod \"8caf595c-95ea-4701-b5e2-97e970cdf01b\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.934324 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ceph\") pod \"8caf595c-95ea-4701-b5e2-97e970cdf01b\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.934399 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ssh-key\") pod \"8caf595c-95ea-4701-b5e2-97e970cdf01b\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.934428 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-tripleo-cleanup-combined-ca-bundle\") pod \"8caf595c-95ea-4701-b5e2-97e970cdf01b\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.934554 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-inventory\") pod \"8caf595c-95ea-4701-b5e2-97e970cdf01b\" (UID: \"8caf595c-95ea-4701-b5e2-97e970cdf01b\") " Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.945994 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8caf595c-95ea-4701-b5e2-97e970cdf01b-kube-api-access-7ml8q" (OuterVolumeSpecName: "kube-api-access-7ml8q") pod "8caf595c-95ea-4701-b5e2-97e970cdf01b" (UID: "8caf595c-95ea-4701-b5e2-97e970cdf01b"). InnerVolumeSpecName "kube-api-access-7ml8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.946427 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ceph" (OuterVolumeSpecName: "ceph") pod "8caf595c-95ea-4701-b5e2-97e970cdf01b" (UID: "8caf595c-95ea-4701-b5e2-97e970cdf01b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.946459 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "8caf595c-95ea-4701-b5e2-97e970cdf01b" (UID: "8caf595c-95ea-4701-b5e2-97e970cdf01b"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:27:18 crc kubenswrapper[4922]: I0930 00:27:18.980527 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-inventory" (OuterVolumeSpecName: "inventory") pod "8caf595c-95ea-4701-b5e2-97e970cdf01b" (UID: "8caf595c-95ea-4701-b5e2-97e970cdf01b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.011540 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8caf595c-95ea-4701-b5e2-97e970cdf01b" (UID: "8caf595c-95ea-4701-b5e2-97e970cdf01b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.042949 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.042992 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ml8q\" (UniqueName: \"kubernetes.io/projected/8caf595c-95ea-4701-b5e2-97e970cdf01b-kube-api-access-7ml8q\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.043005 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.043015 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.043028 4922 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8caf595c-95ea-4701-b5e2-97e970cdf01b-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.413979 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" event={"ID":"8caf595c-95ea-4701-b5e2-97e970cdf01b","Type":"ContainerDied","Data":"a03be10ab44790217139558379f5e313c14028e5347f580d4a2d9d2abe9b48f1"} Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.414013 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a03be10ab44790217139558379f5e313c14028e5347f580d4a2d9d2abe9b48f1" Sep 30 00:27:19 crc kubenswrapper[4922]: I0930 00:27:19.414086 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.650443 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-gjr5h"] Sep 30 00:27:26 crc kubenswrapper[4922]: E0930 00:27:26.651521 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="extract-content" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.651538 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="extract-content" Sep 30 00:27:26 crc kubenswrapper[4922]: E0930 00:27:26.651569 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="extract-utilities" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.651578 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="extract-utilities" Sep 30 00:27:26 crc kubenswrapper[4922]: E0930 00:27:26.651596 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8caf595c-95ea-4701-b5e2-97e970cdf01b" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.651608 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8caf595c-95ea-4701-b5e2-97e970cdf01b" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Sep 30 00:27:26 crc kubenswrapper[4922]: E0930 00:27:26.651635 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="registry-server" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.651644 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="registry-server" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.651889 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2095e5b-567d-45a7-98fc-98e4ba66a3b2" containerName="registry-server" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.651906 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8caf595c-95ea-4701-b5e2-97e970cdf01b" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.652851 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.655085 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.655164 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.656164 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.657860 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.667355 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-gjr5h"] Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.749831 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.750199 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.750226 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmlgm\" (UniqueName: \"kubernetes.io/projected/3d055d48-2bce-49dd-948e-03f2a6e95282-kube-api-access-vmlgm\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.750724 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ceph\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.750982 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-inventory\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.853604 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-inventory\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: 
I0930 00:27:26.853804 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.854020 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.854065 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmlgm\" (UniqueName: \"kubernetes.io/projected/3d055d48-2bce-49dd-948e-03f2a6e95282-kube-api-access-vmlgm\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.854178 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ceph\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.860896 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.861424 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ceph\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.861589 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-inventory\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.862660 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.884958 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmlgm\" (UniqueName: \"kubernetes.io/projected/3d055d48-2bce-49dd-948e-03f2a6e95282-kube-api-access-vmlgm\") pod \"bootstrap-openstack-openstack-cell1-gjr5h\" (UID: 
\"3d055d48-2bce-49dd-948e-03f2a6e95282\") " pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:26 crc kubenswrapper[4922]: I0930 00:27:26.971644 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:27:27 crc kubenswrapper[4922]: I0930 00:27:27.554292 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-gjr5h"] Sep 30 00:27:28 crc kubenswrapper[4922]: I0930 00:27:28.520226 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" event={"ID":"3d055d48-2bce-49dd-948e-03f2a6e95282","Type":"ContainerStarted","Data":"176e9387acde67ccf5206bd96ca915d368618b3911b602e90fa41daf83bca032"} Sep 30 00:27:28 crc kubenswrapper[4922]: I0930 00:27:28.520567 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" event={"ID":"3d055d48-2bce-49dd-948e-03f2a6e95282","Type":"ContainerStarted","Data":"19a580d2814958ef2c37e3bd84da61fa6f0d8ab56b4f006cf124f3837ca6d309"} Sep 30 00:27:28 crc kubenswrapper[4922]: I0930 00:27:28.556670 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" podStartSLOduration=2.3645023050000002 podStartE2EDuration="2.556643925s" podCreationTimestamp="2025-09-30 00:27:26 +0000 UTC" firstStartedPulling="2025-09-30 00:27:27.558800817 +0000 UTC m=+7251.869089650" lastFinishedPulling="2025-09-30 00:27:27.750942447 +0000 UTC m=+7252.061231270" observedRunningTime="2025-09-30 00:27:28.547811056 +0000 UTC m=+7252.858099879" watchObservedRunningTime="2025-09-30 00:27:28.556643925 +0000 UTC m=+7252.866932768" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.382301 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rns9m"] Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.385067 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.404097 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rns9m"] Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.535772 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkj9g\" (UniqueName: \"kubernetes.io/projected/d26af53c-85d7-4665-bbb6-3613c8db247f-kube-api-access-nkj9g\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.536008 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-utilities\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.536050 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-catalog-content\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.637948 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkj9g\" (UniqueName: \"kubernetes.io/projected/d26af53c-85d7-4665-bbb6-3613c8db247f-kube-api-access-nkj9g\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.638673 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-utilities\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.638713 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-catalog-content\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.639294 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-catalog-content\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.639890 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-utilities\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.683687 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-nkj9g\" (UniqueName: \"kubernetes.io/projected/d26af53c-85d7-4665-bbb6-3613c8db247f-kube-api-access-nkj9g\") pod \"redhat-marketplace-rns9m\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:08 crc kubenswrapper[4922]: I0930 00:29:08.742361 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.223051 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rns9m"] Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.258456 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.582668 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r5wmw"] Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.585236 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.598810 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r5wmw"] Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.659558 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-utilities\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.659940 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dffpr\" (UniqueName: \"kubernetes.io/projected/08159d04-9fcc-467a-9714-94250be5398f-kube-api-access-dffpr\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.660015 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-catalog-content\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.761800 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-catalog-content\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.761931 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-utilities\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.762020 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dffpr\" (UniqueName: \"kubernetes.io/projected/08159d04-9fcc-467a-9714-94250be5398f-kube-api-access-dffpr\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.762470 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-catalog-content\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.762600 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-utilities\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.772071 4922 generic.go:334] "Generic (PLEG): container finished" podID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerID="912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f" exitCode=0 Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.772139 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rns9m" event={"ID":"d26af53c-85d7-4665-bbb6-3613c8db247f","Type":"ContainerDied","Data":"912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f"} Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.772192 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rns9m" event={"ID":"d26af53c-85d7-4665-bbb6-3613c8db247f","Type":"ContainerStarted","Data":"7898c55b92caefc99888bd333dba69a7cf7f069883ec36237d6de6c7a952988a"} Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.783718 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dffpr\" (UniqueName: \"kubernetes.io/projected/08159d04-9fcc-467a-9714-94250be5398f-kube-api-access-dffpr\") pod \"community-operators-r5wmw\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:09 crc kubenswrapper[4922]: I0930 00:29:09.941838 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:10 crc kubenswrapper[4922]: I0930 00:29:10.488058 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r5wmw"] Sep 30 00:29:10 crc kubenswrapper[4922]: I0930 00:29:10.797971 4922 generic.go:334] "Generic (PLEG): container finished" podID="08159d04-9fcc-467a-9714-94250be5398f" containerID="42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe" exitCode=0 Sep 30 00:29:10 crc kubenswrapper[4922]: I0930 00:29:10.798260 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5wmw" event={"ID":"08159d04-9fcc-467a-9714-94250be5398f","Type":"ContainerDied","Data":"42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe"} Sep 30 00:29:10 crc kubenswrapper[4922]: I0930 00:29:10.798289 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5wmw" event={"ID":"08159d04-9fcc-467a-9714-94250be5398f","Type":"ContainerStarted","Data":"006f610d725f277bb24d26aff601857aa6450f43eb562b8f9339a0336de074b3"} Sep 30 00:29:11 crc kubenswrapper[4922]: I0930 00:29:11.813219 4922 generic.go:334] "Generic (PLEG): container finished" podID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerID="6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7" exitCode=0 Sep 30 00:29:11 crc kubenswrapper[4922]: I0930 00:29:11.813267 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rns9m" event={"ID":"d26af53c-85d7-4665-bbb6-3613c8db247f","Type":"ContainerDied","Data":"6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7"} Sep 30 00:29:12 crc kubenswrapper[4922]: I0930 00:29:12.828225 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5wmw" event={"ID":"08159d04-9fcc-467a-9714-94250be5398f","Type":"ContainerStarted","Data":"97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1"} Sep 30 00:29:13 crc kubenswrapper[4922]: I0930 00:29:13.850143 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rns9m" event={"ID":"d26af53c-85d7-4665-bbb6-3613c8db247f","Type":"ContainerStarted","Data":"f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275"} Sep 30 00:29:13 crc kubenswrapper[4922]: I0930 00:29:13.882118 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rns9m" podStartSLOduration=2.993620672 podStartE2EDuration="5.882103178s" podCreationTimestamp="2025-09-30 00:29:08 +0000 UTC" firstStartedPulling="2025-09-30 00:29:09.777787676 +0000 UTC m=+7354.088076499" lastFinishedPulling="2025-09-30 00:29:12.666270182 +0000 UTC m=+7356.976559005" observedRunningTime="2025-09-30 00:29:13.879801721 +0000 UTC m=+7358.190090534" watchObservedRunningTime="2025-09-30 00:29:13.882103178 +0000 UTC m=+7358.192391991" Sep 30 00:29:14 crc kubenswrapper[4922]: I0930 00:29:14.863326 4922 generic.go:334] "Generic (PLEG): container finished" podID="08159d04-9fcc-467a-9714-94250be5398f" containerID="97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1" exitCode=0 Sep 30 00:29:14 crc kubenswrapper[4922]: I0930 00:29:14.863458 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5wmw" 
event={"ID":"08159d04-9fcc-467a-9714-94250be5398f","Type":"ContainerDied","Data":"97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1"} Sep 30 00:29:15 crc kubenswrapper[4922]: I0930 00:29:15.873901 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5wmw" event={"ID":"08159d04-9fcc-467a-9714-94250be5398f","Type":"ContainerStarted","Data":"b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe"} Sep 30 00:29:15 crc kubenswrapper[4922]: I0930 00:29:15.921143 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r5wmw" podStartSLOduration=2.228048217 podStartE2EDuration="6.921123245s" podCreationTimestamp="2025-09-30 00:29:09 +0000 UTC" firstStartedPulling="2025-09-30 00:29:10.803550033 +0000 UTC m=+7355.113838846" lastFinishedPulling="2025-09-30 00:29:15.496625051 +0000 UTC m=+7359.806913874" observedRunningTime="2025-09-30 00:29:15.917164678 +0000 UTC m=+7360.227453561" watchObservedRunningTime="2025-09-30 00:29:15.921123245 +0000 UTC m=+7360.231412068" Sep 30 00:29:18 crc kubenswrapper[4922]: I0930 00:29:18.742996 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:18 crc kubenswrapper[4922]: I0930 00:29:18.743591 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:18 crc kubenswrapper[4922]: I0930 00:29:18.830753 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:18 crc kubenswrapper[4922]: I0930 00:29:18.968645 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:19 crc kubenswrapper[4922]: I0930 00:29:19.942502 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:19 crc kubenswrapper[4922]: I0930 00:29:19.943002 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:19 crc kubenswrapper[4922]: I0930 00:29:19.981638 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rns9m"] Sep 30 00:29:20 crc kubenswrapper[4922]: I0930 00:29:20.037813 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:20 crc kubenswrapper[4922]: I0930 00:29:20.932868 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rns9m" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="registry-server" containerID="cri-o://f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275" gracePeriod=2 Sep 30 00:29:20 crc kubenswrapper[4922]: I0930 00:29:20.990472 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.503103 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.678734 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkj9g\" (UniqueName: \"kubernetes.io/projected/d26af53c-85d7-4665-bbb6-3613c8db247f-kube-api-access-nkj9g\") pod \"d26af53c-85d7-4665-bbb6-3613c8db247f\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.678899 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-catalog-content\") pod \"d26af53c-85d7-4665-bbb6-3613c8db247f\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.679074 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-utilities\") pod \"d26af53c-85d7-4665-bbb6-3613c8db247f\" (UID: \"d26af53c-85d7-4665-bbb6-3613c8db247f\") " Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.680160 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-utilities" (OuterVolumeSpecName: "utilities") pod "d26af53c-85d7-4665-bbb6-3613c8db247f" (UID: "d26af53c-85d7-4665-bbb6-3613c8db247f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.691428 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d26af53c-85d7-4665-bbb6-3613c8db247f-kube-api-access-nkj9g" (OuterVolumeSpecName: "kube-api-access-nkj9g") pod "d26af53c-85d7-4665-bbb6-3613c8db247f" (UID: "d26af53c-85d7-4665-bbb6-3613c8db247f"). InnerVolumeSpecName "kube-api-access-nkj9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.710842 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d26af53c-85d7-4665-bbb6-3613c8db247f" (UID: "d26af53c-85d7-4665-bbb6-3613c8db247f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.781778 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.781832 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkj9g\" (UniqueName: \"kubernetes.io/projected/d26af53c-85d7-4665-bbb6-3613c8db247f-kube-api-access-nkj9g\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.781857 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d26af53c-85d7-4665-bbb6-3613c8db247f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.945020 4922 generic.go:334] "Generic (PLEG): container finished" podID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerID="f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275" exitCode=0 Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.945106 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rns9m" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.945131 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rns9m" event={"ID":"d26af53c-85d7-4665-bbb6-3613c8db247f","Type":"ContainerDied","Data":"f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275"} Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.945698 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rns9m" event={"ID":"d26af53c-85d7-4665-bbb6-3613c8db247f","Type":"ContainerDied","Data":"7898c55b92caefc99888bd333dba69a7cf7f069883ec36237d6de6c7a952988a"} Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.945747 4922 scope.go:117] "RemoveContainer" containerID="f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.982801 4922 scope.go:117] "RemoveContainer" containerID="6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7" Sep 30 00:29:21 crc kubenswrapper[4922]: I0930 00:29:21.989779 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rns9m"] Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.000652 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rns9m"] Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.018099 4922 scope.go:117] "RemoveContainer" containerID="912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.078135 4922 scope.go:117] "RemoveContainer" containerID="f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275" Sep 30 00:29:22 crc kubenswrapper[4922]: E0930 00:29:22.078621 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275\": container with ID starting with f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275 not found: ID does not exist" containerID="f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.078692 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275"} err="failed to get container status \"f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275\": rpc error: code = NotFound desc = could not find container \"f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275\": container with ID starting with f4a412c0393d0fb529e33551c20348376ea13418808e5361e9af58bc51907275 not found: ID does not exist" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.078714 4922 scope.go:117] "RemoveContainer" containerID="6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7" Sep 30 00:29:22 crc kubenswrapper[4922]: E0930 00:29:22.079069 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7\": container with ID starting with 6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7 not found: ID does not exist" containerID="6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.079094 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7"} err="failed to get container status \"6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7\": rpc error: code = NotFound desc = could not find container \"6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7\": container with ID starting with 6ddf6914be29d90eb72648d6d5b9cd588ff05798e2ae7180d93603bead86c3b7 not found: ID does not exist" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.079106 4922 scope.go:117] "RemoveContainer" containerID="912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f" Sep 30 00:29:22 crc kubenswrapper[4922]: E0930 00:29:22.079497 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f\": container with ID starting with 912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f not found: ID does not exist" containerID="912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.079535 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f"} err="failed to get container status \"912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f\": rpc error: code = NotFound desc = could not find container \"912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f\": container with ID starting with 912aaf438d16229c3633ed83f3c9a23815eaa2833da0dd6d3ce5528a4432746f not found: ID does not exist" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.376200 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r5wmw"] Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.442173 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" path="/var/lib/kubelet/pods/d26af53c-85d7-4665-bbb6-3613c8db247f/volumes" Sep 30 00:29:22 crc kubenswrapper[4922]: I0930 00:29:22.965237 4922 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/community-operators-r5wmw" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="registry-server" containerID="cri-o://b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe" gracePeriod=2 Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.462103 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.624090 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dffpr\" (UniqueName: \"kubernetes.io/projected/08159d04-9fcc-467a-9714-94250be5398f-kube-api-access-dffpr\") pod \"08159d04-9fcc-467a-9714-94250be5398f\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.624242 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-catalog-content\") pod \"08159d04-9fcc-467a-9714-94250be5398f\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.624319 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-utilities\") pod \"08159d04-9fcc-467a-9714-94250be5398f\" (UID: \"08159d04-9fcc-467a-9714-94250be5398f\") " Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.626726 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-utilities" (OuterVolumeSpecName: "utilities") pod "08159d04-9fcc-467a-9714-94250be5398f" (UID: "08159d04-9fcc-467a-9714-94250be5398f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.631777 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08159d04-9fcc-467a-9714-94250be5398f-kube-api-access-dffpr" (OuterVolumeSpecName: "kube-api-access-dffpr") pod "08159d04-9fcc-467a-9714-94250be5398f" (UID: "08159d04-9fcc-467a-9714-94250be5398f"). InnerVolumeSpecName "kube-api-access-dffpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.688485 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08159d04-9fcc-467a-9714-94250be5398f" (UID: "08159d04-9fcc-467a-9714-94250be5398f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.727029 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dffpr\" (UniqueName: \"kubernetes.io/projected/08159d04-9fcc-467a-9714-94250be5398f-kube-api-access-dffpr\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.727071 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.727084 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08159d04-9fcc-467a-9714-94250be5398f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.994291 4922 generic.go:334] "Generic (PLEG): container finished" podID="08159d04-9fcc-467a-9714-94250be5398f" containerID="b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe" exitCode=0 Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.994367 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5wmw" event={"ID":"08159d04-9fcc-467a-9714-94250be5398f","Type":"ContainerDied","Data":"b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe"} Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.994462 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r5wmw" event={"ID":"08159d04-9fcc-467a-9714-94250be5398f","Type":"ContainerDied","Data":"006f610d725f277bb24d26aff601857aa6450f43eb562b8f9339a0336de074b3"} Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.994498 4922 scope.go:117] "RemoveContainer" containerID="b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe" Sep 30 00:29:23 crc kubenswrapper[4922]: I0930 00:29:23.995666 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r5wmw" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.045212 4922 scope.go:117] "RemoveContainer" containerID="97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.086618 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r5wmw"] Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.094131 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r5wmw"] Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.096501 4922 scope.go:117] "RemoveContainer" containerID="42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.131622 4922 scope.go:117] "RemoveContainer" containerID="b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe" Sep 30 00:29:24 crc kubenswrapper[4922]: E0930 00:29:24.132162 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe\": container with ID starting with b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe not found: ID does not exist" containerID="b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.132220 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe"} err="failed to get container status \"b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe\": rpc error: code = NotFound desc = could not find container \"b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe\": container with ID starting with b046e2c0c8d1d3221e465bfca9bad7f26ce60c2d0aeab67c60cdc6171b3991fe not found: ID does not exist" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.132328 4922 scope.go:117] "RemoveContainer" containerID="97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1" Sep 30 00:29:24 crc kubenswrapper[4922]: E0930 00:29:24.132929 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1\": container with ID starting with 97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1 not found: ID does not exist" containerID="97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.133018 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1"} err="failed to get container status \"97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1\": rpc error: code = NotFound desc = could not find container \"97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1\": container with ID starting with 97c38797ae92ea20c5130ca2196809dc2af4ffe6817676fd254893c4f477e2c1 not found: ID does not exist" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.133062 4922 scope.go:117] "RemoveContainer" containerID="42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe" Sep 30 00:29:24 crc kubenswrapper[4922]: E0930 00:29:24.133567 4922 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe\": container with ID starting with 42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe not found: ID does not exist" containerID="42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.133599 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe"} err="failed to get container status \"42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe\": rpc error: code = NotFound desc = could not find container \"42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe\": container with ID starting with 42bf320f75359880208c8b2b9ed64f3512aea2d8debf3ab6b522b04e731ec9fe not found: ID does not exist" Sep 30 00:29:24 crc kubenswrapper[4922]: I0930 00:29:24.442978 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08159d04-9fcc-467a-9714-94250be5398f" path="/var/lib/kubelet/pods/08159d04-9fcc-467a-9714-94250be5398f/volumes" Sep 30 00:29:28 crc kubenswrapper[4922]: I0930 00:29:28.913297 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:29:28 crc kubenswrapper[4922]: I0930 00:29:28.914012 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:29:58 crc kubenswrapper[4922]: I0930 00:29:58.912990 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:29:58 crc kubenswrapper[4922]: I0930 00:29:58.913691 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.148619 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78"] Sep 30 00:30:00 crc kubenswrapper[4922]: E0930 00:30:00.149478 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="extract-utilities" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149494 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="extract-utilities" Sep 30 00:30:00 crc kubenswrapper[4922]: E0930 00:30:00.149512 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="extract-utilities" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149518 4922 
state_mem.go:107] "Deleted CPUSet assignment" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="extract-utilities" Sep 30 00:30:00 crc kubenswrapper[4922]: E0930 00:30:00.149530 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="registry-server" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149537 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="registry-server" Sep 30 00:30:00 crc kubenswrapper[4922]: E0930 00:30:00.149552 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="extract-content" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149557 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="extract-content" Sep 30 00:30:00 crc kubenswrapper[4922]: E0930 00:30:00.149573 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="registry-server" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149579 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="registry-server" Sep 30 00:30:00 crc kubenswrapper[4922]: E0930 00:30:00.149592 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="extract-content" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149598 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="extract-content" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149834 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d26af53c-85d7-4665-bbb6-3613c8db247f" containerName="registry-server" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.149856 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="08159d04-9fcc-467a-9714-94250be5398f" containerName="registry-server" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.151249 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.154124 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.154498 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.161836 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78"] Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.214306 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c858009c-8094-4aec-925d-fa1cc18fb120-secret-volume\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.214380 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c858009c-8094-4aec-925d-fa1cc18fb120-config-volume\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.215145 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6tkj\" (UniqueName: \"kubernetes.io/projected/c858009c-8094-4aec-925d-fa1cc18fb120-kube-api-access-n6tkj\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.317293 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c858009c-8094-4aec-925d-fa1cc18fb120-secret-volume\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.318227 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c858009c-8094-4aec-925d-fa1cc18fb120-config-volume\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.318272 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c858009c-8094-4aec-925d-fa1cc18fb120-config-volume\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.318371 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6tkj\" (UniqueName: \"kubernetes.io/projected/c858009c-8094-4aec-925d-fa1cc18fb120-kube-api-access-n6tkj\") pod 
\"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.328132 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c858009c-8094-4aec-925d-fa1cc18fb120-secret-volume\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.342056 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6tkj\" (UniqueName: \"kubernetes.io/projected/c858009c-8094-4aec-925d-fa1cc18fb120-kube-api-access-n6tkj\") pod \"collect-profiles-29319870-ghq78\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.478834 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:00 crc kubenswrapper[4922]: I0930 00:30:00.991570 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78"] Sep 30 00:30:01 crc kubenswrapper[4922]: W0930 00:30:01.003379 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc858009c_8094_4aec_925d_fa1cc18fb120.slice/crio-dac50905baf659520f6b0a4231013b9c5791b4831a2541ba144fd97e243d32a1 WatchSource:0}: Error finding container dac50905baf659520f6b0a4231013b9c5791b4831a2541ba144fd97e243d32a1: Status 404 returned error can't find the container with id dac50905baf659520f6b0a4231013b9c5791b4831a2541ba144fd97e243d32a1 Sep 30 00:30:01 crc kubenswrapper[4922]: I0930 00:30:01.429025 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" event={"ID":"c858009c-8094-4aec-925d-fa1cc18fb120","Type":"ContainerStarted","Data":"236058568fa50ce7e0e1bcba3135a4823f93efe90c7a11f8dfd63a1aa8b1bd8d"} Sep 30 00:30:01 crc kubenswrapper[4922]: I0930 00:30:01.429430 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" event={"ID":"c858009c-8094-4aec-925d-fa1cc18fb120","Type":"ContainerStarted","Data":"dac50905baf659520f6b0a4231013b9c5791b4831a2541ba144fd97e243d32a1"} Sep 30 00:30:01 crc kubenswrapper[4922]: I0930 00:30:01.449565 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" podStartSLOduration=1.449543753 podStartE2EDuration="1.449543753s" podCreationTimestamp="2025-09-30 00:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:30:01.441579167 +0000 UTC m=+7405.751867980" watchObservedRunningTime="2025-09-30 00:30:01.449543753 +0000 UTC m=+7405.759832566" Sep 30 00:30:02 crc kubenswrapper[4922]: I0930 00:30:02.446013 4922 generic.go:334] "Generic (PLEG): container finished" podID="c858009c-8094-4aec-925d-fa1cc18fb120" containerID="236058568fa50ce7e0e1bcba3135a4823f93efe90c7a11f8dfd63a1aa8b1bd8d" exitCode=0 Sep 30 00:30:02 crc kubenswrapper[4922]: I0930 00:30:02.446980 
4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" event={"ID":"c858009c-8094-4aec-925d-fa1cc18fb120","Type":"ContainerDied","Data":"236058568fa50ce7e0e1bcba3135a4823f93efe90c7a11f8dfd63a1aa8b1bd8d"} Sep 30 00:30:03 crc kubenswrapper[4922]: I0930 00:30:03.880575 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.002562 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6tkj\" (UniqueName: \"kubernetes.io/projected/c858009c-8094-4aec-925d-fa1cc18fb120-kube-api-access-n6tkj\") pod \"c858009c-8094-4aec-925d-fa1cc18fb120\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.002816 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c858009c-8094-4aec-925d-fa1cc18fb120-secret-volume\") pod \"c858009c-8094-4aec-925d-fa1cc18fb120\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.003179 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c858009c-8094-4aec-925d-fa1cc18fb120-config-volume\") pod \"c858009c-8094-4aec-925d-fa1cc18fb120\" (UID: \"c858009c-8094-4aec-925d-fa1cc18fb120\") " Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.004262 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c858009c-8094-4aec-925d-fa1cc18fb120-config-volume" (OuterVolumeSpecName: "config-volume") pod "c858009c-8094-4aec-925d-fa1cc18fb120" (UID: "c858009c-8094-4aec-925d-fa1cc18fb120"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.008483 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c858009c-8094-4aec-925d-fa1cc18fb120-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c858009c-8094-4aec-925d-fa1cc18fb120" (UID: "c858009c-8094-4aec-925d-fa1cc18fb120"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.009601 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c858009c-8094-4aec-925d-fa1cc18fb120-kube-api-access-n6tkj" (OuterVolumeSpecName: "kube-api-access-n6tkj") pod "c858009c-8094-4aec-925d-fa1cc18fb120" (UID: "c858009c-8094-4aec-925d-fa1cc18fb120"). InnerVolumeSpecName "kube-api-access-n6tkj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.106123 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c858009c-8094-4aec-925d-fa1cc18fb120-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.106161 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c858009c-8094-4aec-925d-fa1cc18fb120-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.106175 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6tkj\" (UniqueName: \"kubernetes.io/projected/c858009c-8094-4aec-925d-fa1cc18fb120-kube-api-access-n6tkj\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.489327 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" event={"ID":"c858009c-8094-4aec-925d-fa1cc18fb120","Type":"ContainerDied","Data":"dac50905baf659520f6b0a4231013b9c5791b4831a2541ba144fd97e243d32a1"} Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.489773 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dac50905baf659520f6b0a4231013b9c5791b4831a2541ba144fd97e243d32a1" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.489503 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78" Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.539594 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq"] Sep 30 00:30:04 crc kubenswrapper[4922]: I0930 00:30:04.550951 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319825-b5njq"] Sep 30 00:30:06 crc kubenswrapper[4922]: I0930 00:30:06.442863 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="769ecc0b-2d3a-433d-b206-55953f8a6169" path="/var/lib/kubelet/pods/769ecc0b-2d3a-433d-b206-55953f8a6169/volumes" Sep 30 00:30:28 crc kubenswrapper[4922]: I0930 00:30:28.913298 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:30:28 crc kubenswrapper[4922]: I0930 00:30:28.914122 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:30:28 crc kubenswrapper[4922]: I0930 00:30:28.914190 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:30:28 crc kubenswrapper[4922]: I0930 00:30:28.915118 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6"} 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:30:28 crc kubenswrapper[4922]: I0930 00:30:28.915181 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" gracePeriod=600 Sep 30 00:30:29 crc kubenswrapper[4922]: E0930 00:30:29.042123 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:30:29 crc kubenswrapper[4922]: I0930 00:30:29.787296 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" exitCode=0 Sep 30 00:30:29 crc kubenswrapper[4922]: I0930 00:30:29.787374 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6"} Sep 30 00:30:29 crc kubenswrapper[4922]: I0930 00:30:29.787934 4922 scope.go:117] "RemoveContainer" containerID="1322df6e2f654d152f0278d46a5ceecb4cd172fb6f73a71644be5c0a0d23a702" Sep 30 00:30:29 crc kubenswrapper[4922]: I0930 00:30:29.789435 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:30:29 crc kubenswrapper[4922]: E0930 00:30:29.789959 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:30:37 crc kubenswrapper[4922]: I0930 00:30:37.895034 4922 generic.go:334] "Generic (PLEG): container finished" podID="3d055d48-2bce-49dd-948e-03f2a6e95282" containerID="176e9387acde67ccf5206bd96ca915d368618b3911b602e90fa41daf83bca032" exitCode=0 Sep 30 00:30:37 crc kubenswrapper[4922]: I0930 00:30:37.895112 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" event={"ID":"3d055d48-2bce-49dd-948e-03f2a6e95282","Type":"ContainerDied","Data":"176e9387acde67ccf5206bd96ca915d368618b3911b602e90fa41daf83bca032"} Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.457497 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.597336 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ceph\") pod \"3d055d48-2bce-49dd-948e-03f2a6e95282\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.597583 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-inventory\") pod \"3d055d48-2bce-49dd-948e-03f2a6e95282\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.597677 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-bootstrap-combined-ca-bundle\") pod \"3d055d48-2bce-49dd-948e-03f2a6e95282\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.597748 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmlgm\" (UniqueName: \"kubernetes.io/projected/3d055d48-2bce-49dd-948e-03f2a6e95282-kube-api-access-vmlgm\") pod \"3d055d48-2bce-49dd-948e-03f2a6e95282\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.597800 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ssh-key\") pod \"3d055d48-2bce-49dd-948e-03f2a6e95282\" (UID: \"3d055d48-2bce-49dd-948e-03f2a6e95282\") " Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.606716 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ceph" (OuterVolumeSpecName: "ceph") pod "3d055d48-2bce-49dd-948e-03f2a6e95282" (UID: "3d055d48-2bce-49dd-948e-03f2a6e95282"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.606820 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3d055d48-2bce-49dd-948e-03f2a6e95282" (UID: "3d055d48-2bce-49dd-948e-03f2a6e95282"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.629664 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d055d48-2bce-49dd-948e-03f2a6e95282-kube-api-access-vmlgm" (OuterVolumeSpecName: "kube-api-access-vmlgm") pod "3d055d48-2bce-49dd-948e-03f2a6e95282" (UID: "3d055d48-2bce-49dd-948e-03f2a6e95282"). InnerVolumeSpecName "kube-api-access-vmlgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.642424 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3d055d48-2bce-49dd-948e-03f2a6e95282" (UID: "3d055d48-2bce-49dd-948e-03f2a6e95282"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.646632 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-inventory" (OuterVolumeSpecName: "inventory") pod "3d055d48-2bce-49dd-948e-03f2a6e95282" (UID: "3d055d48-2bce-49dd-948e-03f2a6e95282"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.700987 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmlgm\" (UniqueName: \"kubernetes.io/projected/3d055d48-2bce-49dd-948e-03f2a6e95282-kube-api-access-vmlgm\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.701057 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.701077 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.701094 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.701112 4922 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d055d48-2bce-49dd-948e-03f2a6e95282-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.924863 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" event={"ID":"3d055d48-2bce-49dd-948e-03f2a6e95282","Type":"ContainerDied","Data":"19a580d2814958ef2c37e3bd84da61fa6f0d8ab56b4f006cf124f3837ca6d309"} Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.924959 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19a580d2814958ef2c37e3bd84da61fa6f0d8ab56b4f006cf124f3837ca6d309" Sep 30 00:30:39 crc kubenswrapper[4922]: I0930 00:30:39.924962 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-gjr5h" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.031512 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-mgrb9"] Sep 30 00:30:40 crc kubenswrapper[4922]: E0930 00:30:40.032025 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c858009c-8094-4aec-925d-fa1cc18fb120" containerName="collect-profiles" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.032043 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c858009c-8094-4aec-925d-fa1cc18fb120" containerName="collect-profiles" Sep 30 00:30:40 crc kubenswrapper[4922]: E0930 00:30:40.032062 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d055d48-2bce-49dd-948e-03f2a6e95282" containerName="bootstrap-openstack-openstack-cell1" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.032071 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d055d48-2bce-49dd-948e-03f2a6e95282" containerName="bootstrap-openstack-openstack-cell1" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.032415 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d055d48-2bce-49dd-948e-03f2a6e95282" containerName="bootstrap-openstack-openstack-cell1" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.032454 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c858009c-8094-4aec-925d-fa1cc18fb120" containerName="collect-profiles" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.033431 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.037433 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.037699 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.037887 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.038034 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.054007 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-mgrb9"] Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.214156 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gq7m\" (UniqueName: \"kubernetes.io/projected/21204209-381f-4ebb-ae8a-70bb06b43690-kube-api-access-4gq7m\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.214358 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ssh-key\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.214470 
4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ceph\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.214530 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-inventory\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.317030 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gq7m\" (UniqueName: \"kubernetes.io/projected/21204209-381f-4ebb-ae8a-70bb06b43690-kube-api-access-4gq7m\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.317169 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ssh-key\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.317250 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ceph\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.317301 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-inventory\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.322038 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ceph\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.324836 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ssh-key\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.327492 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-inventory\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: 
\"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.342980 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gq7m\" (UniqueName: \"kubernetes.io/projected/21204209-381f-4ebb-ae8a-70bb06b43690-kube-api-access-4gq7m\") pod \"download-cache-openstack-openstack-cell1-mgrb9\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.366548 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.424579 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:30:40 crc kubenswrapper[4922]: E0930 00:30:40.428259 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:30:40 crc kubenswrapper[4922]: I0930 00:30:40.936459 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-mgrb9"] Sep 30 00:30:41 crc kubenswrapper[4922]: I0930 00:30:41.947091 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" event={"ID":"21204209-381f-4ebb-ae8a-70bb06b43690","Type":"ContainerStarted","Data":"d1c91f85a7d3daa6f6252cc1074bcd390be6e62881e75d5d9008e771ee8e9409"} Sep 30 00:30:41 crc kubenswrapper[4922]: I0930 00:30:41.947522 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" event={"ID":"21204209-381f-4ebb-ae8a-70bb06b43690","Type":"ContainerStarted","Data":"d6d707f21a26e2bf4e95bfccdca7838379b4e82646e500a5a892a25982e8fdf6"} Sep 30 00:30:42 crc kubenswrapper[4922]: I0930 00:30:42.008349 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" podStartSLOduration=2.796533221 podStartE2EDuration="3.008331937s" podCreationTimestamp="2025-09-30 00:30:39 +0000 UTC" firstStartedPulling="2025-09-30 00:30:40.943451882 +0000 UTC m=+7445.253740695" lastFinishedPulling="2025-09-30 00:30:41.155250598 +0000 UTC m=+7445.465539411" observedRunningTime="2025-09-30 00:30:41.999303674 +0000 UTC m=+7446.309592517" watchObservedRunningTime="2025-09-30 00:30:42.008331937 +0000 UTC m=+7446.318620750" Sep 30 00:30:53 crc kubenswrapper[4922]: I0930 00:30:53.422522 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:30:53 crc kubenswrapper[4922]: E0930 00:30:53.423541 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:30:58 crc kubenswrapper[4922]: I0930 00:30:58.593536 4922 scope.go:117] "RemoveContainer" containerID="4076f5294483d8b1605d5bb09fac961a9b4d0ad00df19af05e9a7d86edef49b4" Sep 30 00:31:07 crc kubenswrapper[4922]: I0930 00:31:07.422705 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:31:07 crc kubenswrapper[4922]: E0930 00:31:07.423773 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:31:19 crc kubenswrapper[4922]: I0930 00:31:19.422121 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:31:19 crc kubenswrapper[4922]: E0930 00:31:19.422747 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:31:31 crc kubenswrapper[4922]: I0930 00:31:31.421880 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:31:31 crc kubenswrapper[4922]: E0930 00:31:31.423142 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:31:46 crc kubenswrapper[4922]: I0930 00:31:46.434859 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:31:46 crc kubenswrapper[4922]: E0930 00:31:46.436138 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:32:00 crc kubenswrapper[4922]: I0930 00:32:00.422197 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:32:00 crc kubenswrapper[4922]: E0930 00:32:00.423106 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:32:14 crc kubenswrapper[4922]: I0930 00:32:14.422190 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:32:14 crc kubenswrapper[4922]: E0930 00:32:14.423314 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:32:15 crc kubenswrapper[4922]: I0930 00:32:15.038196 4922 generic.go:334] "Generic (PLEG): container finished" podID="21204209-381f-4ebb-ae8a-70bb06b43690" containerID="d1c91f85a7d3daa6f6252cc1074bcd390be6e62881e75d5d9008e771ee8e9409" exitCode=0 Sep 30 00:32:15 crc kubenswrapper[4922]: I0930 00:32:15.038297 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" event={"ID":"21204209-381f-4ebb-ae8a-70bb06b43690","Type":"ContainerDied","Data":"d1c91f85a7d3daa6f6252cc1074bcd390be6e62881e75d5d9008e771ee8e9409"} Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.545104 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.659257 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-inventory\") pod \"21204209-381f-4ebb-ae8a-70bb06b43690\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.659325 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ssh-key\") pod \"21204209-381f-4ebb-ae8a-70bb06b43690\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.659351 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ceph\") pod \"21204209-381f-4ebb-ae8a-70bb06b43690\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.659532 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gq7m\" (UniqueName: \"kubernetes.io/projected/21204209-381f-4ebb-ae8a-70bb06b43690-kube-api-access-4gq7m\") pod \"21204209-381f-4ebb-ae8a-70bb06b43690\" (UID: \"21204209-381f-4ebb-ae8a-70bb06b43690\") " Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.664826 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21204209-381f-4ebb-ae8a-70bb06b43690-kube-api-access-4gq7m" (OuterVolumeSpecName: "kube-api-access-4gq7m") pod "21204209-381f-4ebb-ae8a-70bb06b43690" (UID: "21204209-381f-4ebb-ae8a-70bb06b43690"). InnerVolumeSpecName "kube-api-access-4gq7m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.666556 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ceph" (OuterVolumeSpecName: "ceph") pod "21204209-381f-4ebb-ae8a-70bb06b43690" (UID: "21204209-381f-4ebb-ae8a-70bb06b43690"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.688342 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "21204209-381f-4ebb-ae8a-70bb06b43690" (UID: "21204209-381f-4ebb-ae8a-70bb06b43690"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.692080 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-inventory" (OuterVolumeSpecName: "inventory") pod "21204209-381f-4ebb-ae8a-70bb06b43690" (UID: "21204209-381f-4ebb-ae8a-70bb06b43690"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.762225 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.762270 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.762309 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/21204209-381f-4ebb-ae8a-70bb06b43690-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:16 crc kubenswrapper[4922]: I0930 00:32:16.762321 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gq7m\" (UniqueName: \"kubernetes.io/projected/21204209-381f-4ebb-ae8a-70bb06b43690-kube-api-access-4gq7m\") on node \"crc\" DevicePath \"\"" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.064817 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" event={"ID":"21204209-381f-4ebb-ae8a-70bb06b43690","Type":"ContainerDied","Data":"d6d707f21a26e2bf4e95bfccdca7838379b4e82646e500a5a892a25982e8fdf6"} Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.065130 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6d707f21a26e2bf4e95bfccdca7838379b4e82646e500a5a892a25982e8fdf6" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.064946 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-mgrb9" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.177240 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-5c2qn"] Sep 30 00:32:17 crc kubenswrapper[4922]: E0930 00:32:17.177908 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21204209-381f-4ebb-ae8a-70bb06b43690" containerName="download-cache-openstack-openstack-cell1" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.177936 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="21204209-381f-4ebb-ae8a-70bb06b43690" containerName="download-cache-openstack-openstack-cell1" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.178311 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="21204209-381f-4ebb-ae8a-70bb06b43690" containerName="download-cache-openstack-openstack-cell1" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.179732 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.183971 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.184072 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.184245 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.184288 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.208698 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-5c2qn"] Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.274062 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnf8z\" (UniqueName: \"kubernetes.io/projected/99cb0060-861c-4fb7-9414-fe4575595fe7-kube-api-access-dnf8z\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.274195 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ssh-key\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.274678 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ceph\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.274996 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-inventory\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.377554 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnf8z\" (UniqueName: \"kubernetes.io/projected/99cb0060-861c-4fb7-9414-fe4575595fe7-kube-api-access-dnf8z\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.377660 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ssh-key\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.377874 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ceph\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.378043 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-inventory\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.385338 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-inventory\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.385446 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ssh-key\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.392356 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ceph\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.395044 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnf8z\" (UniqueName: \"kubernetes.io/projected/99cb0060-861c-4fb7-9414-fe4575595fe7-kube-api-access-dnf8z\") pod \"configure-network-openstack-openstack-cell1-5c2qn\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " 
pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:17 crc kubenswrapper[4922]: I0930 00:32:17.516256 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:32:18 crc kubenswrapper[4922]: I0930 00:32:18.091885 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-5c2qn"] Sep 30 00:32:18 crc kubenswrapper[4922]: W0930 00:32:18.097761 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99cb0060_861c_4fb7_9414_fe4575595fe7.slice/crio-c92c1b2baf76f5236c4cfd6204e7ebde13baedad237768fe7c9a4f67543faa16 WatchSource:0}: Error finding container c92c1b2baf76f5236c4cfd6204e7ebde13baedad237768fe7c9a4f67543faa16: Status 404 returned error can't find the container with id c92c1b2baf76f5236c4cfd6204e7ebde13baedad237768fe7c9a4f67543faa16 Sep 30 00:32:18 crc kubenswrapper[4922]: I0930 00:32:18.101608 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:32:19 crc kubenswrapper[4922]: I0930 00:32:19.087705 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" event={"ID":"99cb0060-861c-4fb7-9414-fe4575595fe7","Type":"ContainerStarted","Data":"f5fdbe02d859531970435fb5c754595f99f581db95f15b35b6d65fb4873169ff"} Sep 30 00:32:19 crc kubenswrapper[4922]: I0930 00:32:19.088064 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" event={"ID":"99cb0060-861c-4fb7-9414-fe4575595fe7","Type":"ContainerStarted","Data":"c92c1b2baf76f5236c4cfd6204e7ebde13baedad237768fe7c9a4f67543faa16"} Sep 30 00:32:19 crc kubenswrapper[4922]: I0930 00:32:19.114811 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" podStartSLOduration=1.888514376 podStartE2EDuration="2.11479312s" podCreationTimestamp="2025-09-30 00:32:17 +0000 UTC" firstStartedPulling="2025-09-30 00:32:18.101223913 +0000 UTC m=+7542.411512746" lastFinishedPulling="2025-09-30 00:32:18.327502637 +0000 UTC m=+7542.637791490" observedRunningTime="2025-09-30 00:32:19.109086149 +0000 UTC m=+7543.419374962" watchObservedRunningTime="2025-09-30 00:32:19.11479312 +0000 UTC m=+7543.425081933" Sep 30 00:32:25 crc kubenswrapper[4922]: I0930 00:32:25.423016 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:32:25 crc kubenswrapper[4922]: E0930 00:32:25.423938 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:32:40 crc kubenswrapper[4922]: I0930 00:32:40.423262 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:32:40 crc kubenswrapper[4922]: E0930 00:32:40.424614 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:32:54 crc kubenswrapper[4922]: I0930 00:32:54.422574 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:32:54 crc kubenswrapper[4922]: E0930 00:32:54.423776 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:33:07 crc kubenswrapper[4922]: I0930 00:33:07.421849 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:33:07 crc kubenswrapper[4922]: E0930 00:33:07.422926 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:33:18 crc kubenswrapper[4922]: I0930 00:33:18.421913 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:33:18 crc kubenswrapper[4922]: E0930 00:33:18.422553 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:33:29 crc kubenswrapper[4922]: I0930 00:33:29.422212 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:33:29 crc kubenswrapper[4922]: E0930 00:33:29.423704 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:33:40 crc kubenswrapper[4922]: I0930 00:33:40.018241 4922 generic.go:334] "Generic (PLEG): container finished" podID="99cb0060-861c-4fb7-9414-fe4575595fe7" containerID="f5fdbe02d859531970435fb5c754595f99f581db95f15b35b6d65fb4873169ff" exitCode=0 Sep 30 00:33:40 crc kubenswrapper[4922]: I0930 00:33:40.018365 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" event={"ID":"99cb0060-861c-4fb7-9414-fe4575595fe7","Type":"ContainerDied","Data":"f5fdbe02d859531970435fb5c754595f99f581db95f15b35b6d65fb4873169ff"} Sep 30 
00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.420334 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.557726 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ssh-key\") pod \"99cb0060-861c-4fb7-9414-fe4575595fe7\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.557817 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ceph\") pod \"99cb0060-861c-4fb7-9414-fe4575595fe7\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.557891 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnf8z\" (UniqueName: \"kubernetes.io/projected/99cb0060-861c-4fb7-9414-fe4575595fe7-kube-api-access-dnf8z\") pod \"99cb0060-861c-4fb7-9414-fe4575595fe7\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.557971 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-inventory\") pod \"99cb0060-861c-4fb7-9414-fe4575595fe7\" (UID: \"99cb0060-861c-4fb7-9414-fe4575595fe7\") " Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.564503 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ceph" (OuterVolumeSpecName: "ceph") pod "99cb0060-861c-4fb7-9414-fe4575595fe7" (UID: "99cb0060-861c-4fb7-9414-fe4575595fe7"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.564613 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99cb0060-861c-4fb7-9414-fe4575595fe7-kube-api-access-dnf8z" (OuterVolumeSpecName: "kube-api-access-dnf8z") pod "99cb0060-861c-4fb7-9414-fe4575595fe7" (UID: "99cb0060-861c-4fb7-9414-fe4575595fe7"). InnerVolumeSpecName "kube-api-access-dnf8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.610930 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "99cb0060-861c-4fb7-9414-fe4575595fe7" (UID: "99cb0060-861c-4fb7-9414-fe4575595fe7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.612346 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-inventory" (OuterVolumeSpecName: "inventory") pod "99cb0060-861c-4fb7-9414-fe4575595fe7" (UID: "99cb0060-861c-4fb7-9414-fe4575595fe7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.660983 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnf8z\" (UniqueName: \"kubernetes.io/projected/99cb0060-861c-4fb7-9414-fe4575595fe7-kube-api-access-dnf8z\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.661034 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.661046 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:41 crc kubenswrapper[4922]: I0930 00:33:41.661061 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/99cb0060-861c-4fb7-9414-fe4575595fe7-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.043867 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" event={"ID":"99cb0060-861c-4fb7-9414-fe4575595fe7","Type":"ContainerDied","Data":"c92c1b2baf76f5236c4cfd6204e7ebde13baedad237768fe7c9a4f67543faa16"} Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.044183 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c92c1b2baf76f5236c4cfd6204e7ebde13baedad237768fe7c9a4f67543faa16" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.043900 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-5c2qn" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.120926 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-sdvr7"] Sep 30 00:33:42 crc kubenswrapper[4922]: E0930 00:33:42.121499 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99cb0060-861c-4fb7-9414-fe4575595fe7" containerName="configure-network-openstack-openstack-cell1" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.121523 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="99cb0060-861c-4fb7-9414-fe4575595fe7" containerName="configure-network-openstack-openstack-cell1" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.121793 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="99cb0060-861c-4fb7-9414-fe4575595fe7" containerName="configure-network-openstack-openstack-cell1" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.122845 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.125343 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.125643 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.126258 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.130073 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-sdvr7"] Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.130520 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.275530 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-inventory\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.275743 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ssh-key\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.275805 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qktmw\" (UniqueName: \"kubernetes.io/projected/4798d5ab-15f7-475c-bf7b-8b9b09222f96-kube-api-access-qktmw\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.276155 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ceph\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.378801 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ceph\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.378926 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-inventory\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 
00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.379206 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ssh-key\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.379323 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qktmw\" (UniqueName: \"kubernetes.io/projected/4798d5ab-15f7-475c-bf7b-8b9b09222f96-kube-api-access-qktmw\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.384278 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ssh-key\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.391589 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ceph\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.392514 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-inventory\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.405879 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qktmw\" (UniqueName: \"kubernetes.io/projected/4798d5ab-15f7-475c-bf7b-8b9b09222f96-kube-api-access-qktmw\") pod \"validate-network-openstack-openstack-cell1-sdvr7\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.422108 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:33:42 crc kubenswrapper[4922]: E0930 00:33:42.422647 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:33:42 crc kubenswrapper[4922]: I0930 00:33:42.450250 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:43 crc kubenswrapper[4922]: I0930 00:33:43.049621 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-sdvr7"] Sep 30 00:33:44 crc kubenswrapper[4922]: I0930 00:33:44.065469 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" event={"ID":"4798d5ab-15f7-475c-bf7b-8b9b09222f96","Type":"ContainerStarted","Data":"af03f651c0abda394f5fde5c37b159a15257f335da385cbea3e8484ab6e84fb0"} Sep 30 00:33:44 crc kubenswrapper[4922]: I0930 00:33:44.066194 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" event={"ID":"4798d5ab-15f7-475c-bf7b-8b9b09222f96","Type":"ContainerStarted","Data":"6e4850a72c5abcd80d88a9a4a5309bc2b7053fe28afa5ac55a6b0d70714d41e6"} Sep 30 00:33:44 crc kubenswrapper[4922]: I0930 00:33:44.105595 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" podStartSLOduration=1.882712731 podStartE2EDuration="2.105568611s" podCreationTimestamp="2025-09-30 00:33:42 +0000 UTC" firstStartedPulling="2025-09-30 00:33:43.056752233 +0000 UTC m=+7627.367041046" lastFinishedPulling="2025-09-30 00:33:43.279608073 +0000 UTC m=+7627.589896926" observedRunningTime="2025-09-30 00:33:44.08446083 +0000 UTC m=+7628.394749653" watchObservedRunningTime="2025-09-30 00:33:44.105568611 +0000 UTC m=+7628.415857464" Sep 30 00:33:50 crc kubenswrapper[4922]: I0930 00:33:50.129679 4922 generic.go:334] "Generic (PLEG): container finished" podID="4798d5ab-15f7-475c-bf7b-8b9b09222f96" containerID="af03f651c0abda394f5fde5c37b159a15257f335da385cbea3e8484ab6e84fb0" exitCode=0 Sep 30 00:33:50 crc kubenswrapper[4922]: I0930 00:33:50.129768 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" event={"ID":"4798d5ab-15f7-475c-bf7b-8b9b09222f96","Type":"ContainerDied","Data":"af03f651c0abda394f5fde5c37b159a15257f335da385cbea3e8484ab6e84fb0"} Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.660419 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.712917 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-inventory\") pod \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.712957 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qktmw\" (UniqueName: \"kubernetes.io/projected/4798d5ab-15f7-475c-bf7b-8b9b09222f96-kube-api-access-qktmw\") pod \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.713070 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ceph\") pod \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.713251 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ssh-key\") pod \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\" (UID: \"4798d5ab-15f7-475c-bf7b-8b9b09222f96\") " Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.718312 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4798d5ab-15f7-475c-bf7b-8b9b09222f96-kube-api-access-qktmw" (OuterVolumeSpecName: "kube-api-access-qktmw") pod "4798d5ab-15f7-475c-bf7b-8b9b09222f96" (UID: "4798d5ab-15f7-475c-bf7b-8b9b09222f96"). InnerVolumeSpecName "kube-api-access-qktmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.719345 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ceph" (OuterVolumeSpecName: "ceph") pod "4798d5ab-15f7-475c-bf7b-8b9b09222f96" (UID: "4798d5ab-15f7-475c-bf7b-8b9b09222f96"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.746622 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4798d5ab-15f7-475c-bf7b-8b9b09222f96" (UID: "4798d5ab-15f7-475c-bf7b-8b9b09222f96"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.760435 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-inventory" (OuterVolumeSpecName: "inventory") pod "4798d5ab-15f7-475c-bf7b-8b9b09222f96" (UID: "4798d5ab-15f7-475c-bf7b-8b9b09222f96"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.817128 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.817170 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.817185 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qktmw\" (UniqueName: \"kubernetes.io/projected/4798d5ab-15f7-475c-bf7b-8b9b09222f96-kube-api-access-qktmw\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:51 crc kubenswrapper[4922]: I0930 00:33:51.817195 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4798d5ab-15f7-475c-bf7b-8b9b09222f96-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.151429 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" event={"ID":"4798d5ab-15f7-475c-bf7b-8b9b09222f96","Type":"ContainerDied","Data":"6e4850a72c5abcd80d88a9a4a5309bc2b7053fe28afa5ac55a6b0d70714d41e6"} Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.151481 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e4850a72c5abcd80d88a9a4a5309bc2b7053fe28afa5ac55a6b0d70714d41e6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.151557 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-sdvr7" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.234894 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-f26v6"] Sep 30 00:33:52 crc kubenswrapper[4922]: E0930 00:33:52.235402 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4798d5ab-15f7-475c-bf7b-8b9b09222f96" containerName="validate-network-openstack-openstack-cell1" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.235451 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4798d5ab-15f7-475c-bf7b-8b9b09222f96" containerName="validate-network-openstack-openstack-cell1" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.235857 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4798d5ab-15f7-475c-bf7b-8b9b09222f96" containerName="validate-network-openstack-openstack-cell1" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.236873 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.240578 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.240781 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.241041 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.241231 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.267293 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-f26v6"] Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.331525 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ssh-key\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.331622 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ceph\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.331744 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-inventory\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.331818 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzqst\" (UniqueName: \"kubernetes.io/projected/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-kube-api-access-hzqst\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.433641 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-inventory\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.433888 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzqst\" (UniqueName: \"kubernetes.io/projected/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-kube-api-access-hzqst\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 
00:33:52.433996 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ssh-key\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.434066 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ceph\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.439160 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ceph\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.441275 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ssh-key\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.445075 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-inventory\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.453030 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzqst\" (UniqueName: \"kubernetes.io/projected/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-kube-api-access-hzqst\") pod \"install-os-openstack-openstack-cell1-f26v6\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:52 crc kubenswrapper[4922]: I0930 00:33:52.554483 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:33:53 crc kubenswrapper[4922]: I0930 00:33:53.096032 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-f26v6"] Sep 30 00:33:53 crc kubenswrapper[4922]: I0930 00:33:53.167817 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-f26v6" event={"ID":"8c975a75-0ee9-4ec6-8318-ced1a887e9f9","Type":"ContainerStarted","Data":"dceb535e45603595e8114e79db39978e896b867fdd342f86a60ef9f36798f7a0"} Sep 30 00:33:54 crc kubenswrapper[4922]: I0930 00:33:54.185179 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-f26v6" event={"ID":"8c975a75-0ee9-4ec6-8318-ced1a887e9f9","Type":"ContainerStarted","Data":"e24380c6686e88953ca6bb67e7849d1146c9c8486eb5fc1d2ca761c1847c2860"} Sep 30 00:33:54 crc kubenswrapper[4922]: I0930 00:33:54.218858 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-f26v6" podStartSLOduration=1.933127933 podStartE2EDuration="2.218834066s" podCreationTimestamp="2025-09-30 00:33:52 +0000 UTC" firstStartedPulling="2025-09-30 00:33:53.11142671 +0000 UTC m=+7637.421715523" lastFinishedPulling="2025-09-30 00:33:53.397132853 +0000 UTC m=+7637.707421656" observedRunningTime="2025-09-30 00:33:54.208965812 +0000 UTC m=+7638.519254625" watchObservedRunningTime="2025-09-30 00:33:54.218834066 +0000 UTC m=+7638.529122889" Sep 30 00:33:55 crc kubenswrapper[4922]: I0930 00:33:55.421709 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:33:55 crc kubenswrapper[4922]: E0930 00:33:55.421962 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:34:10 crc kubenswrapper[4922]: I0930 00:34:10.422363 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:34:10 crc kubenswrapper[4922]: E0930 00:34:10.423496 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:34:23 crc kubenswrapper[4922]: I0930 00:34:23.422765 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:34:23 crc kubenswrapper[4922]: E0930 00:34:23.424022 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:34:37 crc kubenswrapper[4922]: I0930 00:34:37.422139 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:34:37 crc kubenswrapper[4922]: E0930 00:34:37.422929 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:34:37 crc kubenswrapper[4922]: I0930 00:34:37.655021 4922 generic.go:334] "Generic (PLEG): container finished" podID="8c975a75-0ee9-4ec6-8318-ced1a887e9f9" containerID="e24380c6686e88953ca6bb67e7849d1146c9c8486eb5fc1d2ca761c1847c2860" exitCode=0 Sep 30 00:34:37 crc kubenswrapper[4922]: I0930 00:34:37.655062 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-f26v6" event={"ID":"8c975a75-0ee9-4ec6-8318-ced1a887e9f9","Type":"ContainerDied","Data":"e24380c6686e88953ca6bb67e7849d1146c9c8486eb5fc1d2ca761c1847c2860"} Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.125605 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.247591 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzqst\" (UniqueName: \"kubernetes.io/projected/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-kube-api-access-hzqst\") pod \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.247758 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ceph\") pod \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.247871 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ssh-key\") pod \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.247954 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-inventory\") pod \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\" (UID: \"8c975a75-0ee9-4ec6-8318-ced1a887e9f9\") " Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.252913 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ceph" (OuterVolumeSpecName: "ceph") pod "8c975a75-0ee9-4ec6-8318-ced1a887e9f9" (UID: "8c975a75-0ee9-4ec6-8318-ced1a887e9f9"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.252957 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-kube-api-access-hzqst" (OuterVolumeSpecName: "kube-api-access-hzqst") pod "8c975a75-0ee9-4ec6-8318-ced1a887e9f9" (UID: "8c975a75-0ee9-4ec6-8318-ced1a887e9f9"). InnerVolumeSpecName "kube-api-access-hzqst". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.276557 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-inventory" (OuterVolumeSpecName: "inventory") pod "8c975a75-0ee9-4ec6-8318-ced1a887e9f9" (UID: "8c975a75-0ee9-4ec6-8318-ced1a887e9f9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.283808 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8c975a75-0ee9-4ec6-8318-ced1a887e9f9" (UID: "8c975a75-0ee9-4ec6-8318-ced1a887e9f9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.350827 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.350877 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.350896 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzqst\" (UniqueName: \"kubernetes.io/projected/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-kube-api-access-hzqst\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.350915 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8c975a75-0ee9-4ec6-8318-ced1a887e9f9-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.676621 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-f26v6" event={"ID":"8c975a75-0ee9-4ec6-8318-ced1a887e9f9","Type":"ContainerDied","Data":"dceb535e45603595e8114e79db39978e896b867fdd342f86a60ef9f36798f7a0"} Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.676919 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dceb535e45603595e8114e79db39978e896b867fdd342f86a60ef9f36798f7a0" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.676840 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-f26v6" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.760766 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-6cgwb"] Sep 30 00:34:39 crc kubenswrapper[4922]: E0930 00:34:39.761696 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c975a75-0ee9-4ec6-8318-ced1a887e9f9" containerName="install-os-openstack-openstack-cell1" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.761721 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c975a75-0ee9-4ec6-8318-ced1a887e9f9" containerName="install-os-openstack-openstack-cell1" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.761961 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c975a75-0ee9-4ec6-8318-ced1a887e9f9" containerName="install-os-openstack-openstack-cell1" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.762915 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.764509 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.764558 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.765009 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.766113 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.774554 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-6cgwb"] Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.862902 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ceph\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.862993 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkf88\" (UniqueName: \"kubernetes.io/projected/8de83491-9e2b-415e-b765-ef041d9172f1-kube-api-access-vkf88\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.863050 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ssh-key\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.863143 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-inventory\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.965151 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ceph\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.965238 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkf88\" (UniqueName: \"kubernetes.io/projected/8de83491-9e2b-415e-b765-ef041d9172f1-kube-api-access-vkf88\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.965270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ssh-key\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.966504 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-inventory\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.969494 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ssh-key\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.969742 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-inventory\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.971447 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ceph\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:39 crc kubenswrapper[4922]: I0930 00:34:39.984658 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkf88\" (UniqueName: \"kubernetes.io/projected/8de83491-9e2b-415e-b765-ef041d9172f1-kube-api-access-vkf88\") pod \"configure-os-openstack-openstack-cell1-6cgwb\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:40 crc kubenswrapper[4922]: I0930 
00:34:40.083384 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:34:40 crc kubenswrapper[4922]: I0930 00:34:40.757339 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-6cgwb"] Sep 30 00:34:41 crc kubenswrapper[4922]: I0930 00:34:41.701358 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" event={"ID":"8de83491-9e2b-415e-b765-ef041d9172f1","Type":"ContainerStarted","Data":"ccfe95e8fb0b8e71627a0d86925418d6de336bdf97a8dd8450004fb6b21cf2cb"} Sep 30 00:34:41 crc kubenswrapper[4922]: I0930 00:34:41.702263 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" event={"ID":"8de83491-9e2b-415e-b765-ef041d9172f1","Type":"ContainerStarted","Data":"4dcb7919e8e17840aa4b0a851026fde7f0d7fed192dc2632bf2080791d22d1ec"} Sep 30 00:34:41 crc kubenswrapper[4922]: I0930 00:34:41.720711 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" podStartSLOduration=2.556877143 podStartE2EDuration="2.720688472s" podCreationTimestamp="2025-09-30 00:34:39 +0000 UTC" firstStartedPulling="2025-09-30 00:34:40.75705617 +0000 UTC m=+7685.067344983" lastFinishedPulling="2025-09-30 00:34:40.920867509 +0000 UTC m=+7685.231156312" observedRunningTime="2025-09-30 00:34:41.719358769 +0000 UTC m=+7686.029647592" watchObservedRunningTime="2025-09-30 00:34:41.720688472 +0000 UTC m=+7686.030977305" Sep 30 00:34:48 crc kubenswrapper[4922]: I0930 00:34:48.434958 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:34:48 crc kubenswrapper[4922]: E0930 00:34:48.441368 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:34:59 crc kubenswrapper[4922]: I0930 00:34:59.422441 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:34:59 crc kubenswrapper[4922]: E0930 00:34:59.422979 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:35:12 crc kubenswrapper[4922]: I0930 00:35:12.422520 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:35:12 crc kubenswrapper[4922]: E0930 00:35:12.423440 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:35:23 crc kubenswrapper[4922]: I0930 00:35:23.423470 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:35:23 crc kubenswrapper[4922]: E0930 00:35:23.425470 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:35:27 crc kubenswrapper[4922]: I0930 00:35:27.231464 4922 generic.go:334] "Generic (PLEG): container finished" podID="8de83491-9e2b-415e-b765-ef041d9172f1" containerID="ccfe95e8fb0b8e71627a0d86925418d6de336bdf97a8dd8450004fb6b21cf2cb" exitCode=0 Sep 30 00:35:27 crc kubenswrapper[4922]: I0930 00:35:27.231560 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" event={"ID":"8de83491-9e2b-415e-b765-ef041d9172f1","Type":"ContainerDied","Data":"ccfe95e8fb0b8e71627a0d86925418d6de336bdf97a8dd8450004fb6b21cf2cb"} Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.831755 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.940796 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ssh-key\") pod \"8de83491-9e2b-415e-b765-ef041d9172f1\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.941146 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkf88\" (UniqueName: \"kubernetes.io/projected/8de83491-9e2b-415e-b765-ef041d9172f1-kube-api-access-vkf88\") pod \"8de83491-9e2b-415e-b765-ef041d9172f1\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.941285 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ceph\") pod \"8de83491-9e2b-415e-b765-ef041d9172f1\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.941427 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-inventory\") pod \"8de83491-9e2b-415e-b765-ef041d9172f1\" (UID: \"8de83491-9e2b-415e-b765-ef041d9172f1\") " Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.948045 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8de83491-9e2b-415e-b765-ef041d9172f1-kube-api-access-vkf88" (OuterVolumeSpecName: "kube-api-access-vkf88") pod "8de83491-9e2b-415e-b765-ef041d9172f1" (UID: "8de83491-9e2b-415e-b765-ef041d9172f1"). InnerVolumeSpecName "kube-api-access-vkf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.956751 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ceph" (OuterVolumeSpecName: "ceph") pod "8de83491-9e2b-415e-b765-ef041d9172f1" (UID: "8de83491-9e2b-415e-b765-ef041d9172f1"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.979533 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-inventory" (OuterVolumeSpecName: "inventory") pod "8de83491-9e2b-415e-b765-ef041d9172f1" (UID: "8de83491-9e2b-415e-b765-ef041d9172f1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:28 crc kubenswrapper[4922]: I0930 00:35:28.993137 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8de83491-9e2b-415e-b765-ef041d9172f1" (UID: "8de83491-9e2b-415e-b765-ef041d9172f1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.050411 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkf88\" (UniqueName: \"kubernetes.io/projected/8de83491-9e2b-415e-b765-ef041d9172f1-kube-api-access-vkf88\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.051015 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.051135 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.051209 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8de83491-9e2b-415e-b765-ef041d9172f1-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.269643 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" event={"ID":"8de83491-9e2b-415e-b765-ef041d9172f1","Type":"ContainerDied","Data":"4dcb7919e8e17840aa4b0a851026fde7f0d7fed192dc2632bf2080791d22d1ec"} Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.269689 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4dcb7919e8e17840aa4b0a851026fde7f0d7fed192dc2632bf2080791d22d1ec" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.269727 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-6cgwb" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.368037 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-xc6qw"] Sep 30 00:35:29 crc kubenswrapper[4922]: E0930 00:35:29.368559 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8de83491-9e2b-415e-b765-ef041d9172f1" containerName="configure-os-openstack-openstack-cell1" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.368580 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8de83491-9e2b-415e-b765-ef041d9172f1" containerName="configure-os-openstack-openstack-cell1" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.368852 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8de83491-9e2b-415e-b765-ef041d9172f1" containerName="configure-os-openstack-openstack-cell1" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.369811 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.372819 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.372975 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.373104 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.373248 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.382325 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-xc6qw"] Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.567844 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-inventory-0\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.568086 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ceph\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.568270 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptv6v\" (UniqueName: \"kubernetes.io/projected/c5c82e19-0d12-49b8-9578-757544eb77e6-kube-api-access-ptv6v\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.568446 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: 
\"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.670870 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ceph\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.671041 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptv6v\" (UniqueName: \"kubernetes.io/projected/c5c82e19-0d12-49b8-9578-757544eb77e6-kube-api-access-ptv6v\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.671292 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.672259 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-inventory-0\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.674418 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ceph\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.677073 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-inventory-0\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.689055 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.692159 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptv6v\" (UniqueName: \"kubernetes.io/projected/c5c82e19-0d12-49b8-9578-757544eb77e6-kube-api-access-ptv6v\") pod \"ssh-known-hosts-openstack-xc6qw\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:29 crc kubenswrapper[4922]: I0930 00:35:29.733851 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:30 crc kubenswrapper[4922]: I0930 00:35:30.249707 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-xc6qw"] Sep 30 00:35:30 crc kubenswrapper[4922]: I0930 00:35:30.280080 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-xc6qw" event={"ID":"c5c82e19-0d12-49b8-9578-757544eb77e6","Type":"ContainerStarted","Data":"614aecfa46afd38247b471dcfe4fa5fb8cc1b10a71cbcfa0432bd329809f6b6e"} Sep 30 00:35:31 crc kubenswrapper[4922]: I0930 00:35:31.297179 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-xc6qw" event={"ID":"c5c82e19-0d12-49b8-9578-757544eb77e6","Type":"ContainerStarted","Data":"a70a53334d20f7a64bf7281b7cda11c69f326eebce0e9bd7926e9a9707b9edab"} Sep 30 00:35:31 crc kubenswrapper[4922]: I0930 00:35:31.315328 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-xc6qw" podStartSLOduration=2.119527073 podStartE2EDuration="2.315310673s" podCreationTimestamp="2025-09-30 00:35:29 +0000 UTC" firstStartedPulling="2025-09-30 00:35:30.25899193 +0000 UTC m=+7734.569280743" lastFinishedPulling="2025-09-30 00:35:30.45477551 +0000 UTC m=+7734.765064343" observedRunningTime="2025-09-30 00:35:31.313469578 +0000 UTC m=+7735.623758391" watchObservedRunningTime="2025-09-30 00:35:31.315310673 +0000 UTC m=+7735.625599486" Sep 30 00:35:36 crc kubenswrapper[4922]: I0930 00:35:36.446738 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:35:37 crc kubenswrapper[4922]: I0930 00:35:37.373006 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"701d764a4a96ad9e037fd51fa1aa45fdf94245706ac842becc5cb33c81251bc3"} Sep 30 00:35:39 crc kubenswrapper[4922]: I0930 00:35:39.396980 4922 generic.go:334] "Generic (PLEG): container finished" podID="c5c82e19-0d12-49b8-9578-757544eb77e6" containerID="a70a53334d20f7a64bf7281b7cda11c69f326eebce0e9bd7926e9a9707b9edab" exitCode=0 Sep 30 00:35:39 crc kubenswrapper[4922]: I0930 00:35:39.397071 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-xc6qw" event={"ID":"c5c82e19-0d12-49b8-9578-757544eb77e6","Type":"ContainerDied","Data":"a70a53334d20f7a64bf7281b7cda11c69f326eebce0e9bd7926e9a9707b9edab"} Sep 30 00:35:40 crc kubenswrapper[4922]: I0930 00:35:40.967099 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.052639 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptv6v\" (UniqueName: \"kubernetes.io/projected/c5c82e19-0d12-49b8-9578-757544eb77e6-kube-api-access-ptv6v\") pod \"c5c82e19-0d12-49b8-9578-757544eb77e6\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.052845 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ceph\") pod \"c5c82e19-0d12-49b8-9578-757544eb77e6\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.052939 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-inventory-0\") pod \"c5c82e19-0d12-49b8-9578-757544eb77e6\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.053004 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ssh-key-openstack-cell1\") pod \"c5c82e19-0d12-49b8-9578-757544eb77e6\" (UID: \"c5c82e19-0d12-49b8-9578-757544eb77e6\") " Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.058349 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ceph" (OuterVolumeSpecName: "ceph") pod "c5c82e19-0d12-49b8-9578-757544eb77e6" (UID: "c5c82e19-0d12-49b8-9578-757544eb77e6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.059955 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5c82e19-0d12-49b8-9578-757544eb77e6-kube-api-access-ptv6v" (OuterVolumeSpecName: "kube-api-access-ptv6v") pod "c5c82e19-0d12-49b8-9578-757544eb77e6" (UID: "c5c82e19-0d12-49b8-9578-757544eb77e6"). InnerVolumeSpecName "kube-api-access-ptv6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.084570 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "c5c82e19-0d12-49b8-9578-757544eb77e6" (UID: "c5c82e19-0d12-49b8-9578-757544eb77e6"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.095890 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "c5c82e19-0d12-49b8-9578-757544eb77e6" (UID: "c5c82e19-0d12-49b8-9578-757544eb77e6"). InnerVolumeSpecName "ssh-key-openstack-cell1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.155500 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.155561 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptv6v\" (UniqueName: \"kubernetes.io/projected/c5c82e19-0d12-49b8-9578-757544eb77e6-kube-api-access-ptv6v\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.155576 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.155590 4922 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c5c82e19-0d12-49b8-9578-757544eb77e6-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.422273 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-xc6qw" event={"ID":"c5c82e19-0d12-49b8-9578-757544eb77e6","Type":"ContainerDied","Data":"614aecfa46afd38247b471dcfe4fa5fb8cc1b10a71cbcfa0432bd329809f6b6e"} Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.422305 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-xc6qw" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.422336 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="614aecfa46afd38247b471dcfe4fa5fb8cc1b10a71cbcfa0432bd329809f6b6e" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.518431 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-g4f48"] Sep 30 00:35:41 crc kubenswrapper[4922]: E0930 00:35:41.519001 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5c82e19-0d12-49b8-9578-757544eb77e6" containerName="ssh-known-hosts-openstack" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.519023 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5c82e19-0d12-49b8-9578-757544eb77e6" containerName="ssh-known-hosts-openstack" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.519281 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5c82e19-0d12-49b8-9578-757544eb77e6" containerName="ssh-known-hosts-openstack" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.520124 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.523910 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.523948 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.524529 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.530715 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.553664 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-g4f48"] Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.574114 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ssh-key\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.574258 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ceph\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.574315 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-inventory\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.574350 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78ncc\" (UniqueName: \"kubernetes.io/projected/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-kube-api-access-78ncc\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.678078 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ceph\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.678244 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-inventory\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.678302 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-78ncc\" (UniqueName: \"kubernetes.io/projected/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-kube-api-access-78ncc\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.678995 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ssh-key\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.684581 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ceph\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.685278 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ssh-key\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.685569 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-inventory\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.705436 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78ncc\" (UniqueName: \"kubernetes.io/projected/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-kube-api-access-78ncc\") pod \"run-os-openstack-openstack-cell1-g4f48\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:41 crc kubenswrapper[4922]: I0930 00:35:41.855606 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:42 crc kubenswrapper[4922]: I0930 00:35:42.459019 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-g4f48"] Sep 30 00:35:43 crc kubenswrapper[4922]: I0930 00:35:43.445312 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-g4f48" event={"ID":"8cfc3265-59ce-4e4c-b01e-a2091edeacc2","Type":"ContainerStarted","Data":"53edd99d63eb48c67f4880d7c331d4bf5a38b2d719adf6e01695e3b630e5458e"} Sep 30 00:35:43 crc kubenswrapper[4922]: I0930 00:35:43.446118 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-g4f48" event={"ID":"8cfc3265-59ce-4e4c-b01e-a2091edeacc2","Type":"ContainerStarted","Data":"47ec6c742eefb7a8d7846664b8be0b213cd28b15561c65f6c615552f7562f818"} Sep 30 00:35:43 crc kubenswrapper[4922]: I0930 00:35:43.470105 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-g4f48" podStartSLOduration=2.327803773 podStartE2EDuration="2.470080941s" podCreationTimestamp="2025-09-30 00:35:41 +0000 UTC" firstStartedPulling="2025-09-30 00:35:42.46887869 +0000 UTC m=+7746.779167503" lastFinishedPulling="2025-09-30 00:35:42.611155858 +0000 UTC m=+7746.921444671" observedRunningTime="2025-09-30 00:35:43.462007172 +0000 UTC m=+7747.772295985" watchObservedRunningTime="2025-09-30 00:35:43.470080941 +0000 UTC m=+7747.780369754" Sep 30 00:35:51 crc kubenswrapper[4922]: I0930 00:35:51.536926 4922 generic.go:334] "Generic (PLEG): container finished" podID="8cfc3265-59ce-4e4c-b01e-a2091edeacc2" containerID="53edd99d63eb48c67f4880d7c331d4bf5a38b2d719adf6e01695e3b630e5458e" exitCode=0 Sep 30 00:35:51 crc kubenswrapper[4922]: I0930 00:35:51.537051 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-g4f48" event={"ID":"8cfc3265-59ce-4e4c-b01e-a2091edeacc2","Type":"ContainerDied","Data":"53edd99d63eb48c67f4880d7c331d4bf5a38b2d719adf6e01695e3b630e5458e"} Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.058674 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.163996 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-inventory\") pod \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.164072 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78ncc\" (UniqueName: \"kubernetes.io/projected/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-kube-api-access-78ncc\") pod \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.164313 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ceph\") pod \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.164508 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ssh-key\") pod \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\" (UID: \"8cfc3265-59ce-4e4c-b01e-a2091edeacc2\") " Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.171232 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ceph" (OuterVolumeSpecName: "ceph") pod "8cfc3265-59ce-4e4c-b01e-a2091edeacc2" (UID: "8cfc3265-59ce-4e4c-b01e-a2091edeacc2"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.171552 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-kube-api-access-78ncc" (OuterVolumeSpecName: "kube-api-access-78ncc") pod "8cfc3265-59ce-4e4c-b01e-a2091edeacc2" (UID: "8cfc3265-59ce-4e4c-b01e-a2091edeacc2"). InnerVolumeSpecName "kube-api-access-78ncc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.212305 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-inventory" (OuterVolumeSpecName: "inventory") pod "8cfc3265-59ce-4e4c-b01e-a2091edeacc2" (UID: "8cfc3265-59ce-4e4c-b01e-a2091edeacc2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.214826 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8cfc3265-59ce-4e4c-b01e-a2091edeacc2" (UID: "8cfc3265-59ce-4e4c-b01e-a2091edeacc2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.267047 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.267086 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.267102 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.267116 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78ncc\" (UniqueName: \"kubernetes.io/projected/8cfc3265-59ce-4e4c-b01e-a2091edeacc2-kube-api-access-78ncc\") on node \"crc\" DevicePath \"\"" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.566911 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-g4f48" event={"ID":"8cfc3265-59ce-4e4c-b01e-a2091edeacc2","Type":"ContainerDied","Data":"47ec6c742eefb7a8d7846664b8be0b213cd28b15561c65f6c615552f7562f818"} Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.566962 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47ec6c742eefb7a8d7846664b8be0b213cd28b15561c65f6c615552f7562f818" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.567024 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-g4f48" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.660739 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-vwq4t"] Sep 30 00:35:53 crc kubenswrapper[4922]: E0930 00:35:53.663982 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cfc3265-59ce-4e4c-b01e-a2091edeacc2" containerName="run-os-openstack-openstack-cell1" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.664027 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cfc3265-59ce-4e4c-b01e-a2091edeacc2" containerName="run-os-openstack-openstack-cell1" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.664612 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cfc3265-59ce-4e4c-b01e-a2091edeacc2" containerName="run-os-openstack-openstack-cell1" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.666123 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.670077 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.670421 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.670650 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.670868 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.686222 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-vwq4t"] Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.778947 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ceph\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.779015 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-inventory\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.779301 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl6fl\" (UniqueName: \"kubernetes.io/projected/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-kube-api-access-nl6fl\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.779598 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.882265 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ceph\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.882312 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-inventory\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.882443 4922 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-nl6fl\" (UniqueName: \"kubernetes.io/projected/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-kube-api-access-nl6fl\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.882517 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.887760 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.898604 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-inventory\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.901517 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ceph\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.902839 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl6fl\" (UniqueName: \"kubernetes.io/projected/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-kube-api-access-nl6fl\") pod \"reboot-os-openstack-openstack-cell1-vwq4t\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:53 crc kubenswrapper[4922]: I0930 00:35:53.990714 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:35:54 crc kubenswrapper[4922]: I0930 00:35:54.583947 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-vwq4t"] Sep 30 00:35:55 crc kubenswrapper[4922]: I0930 00:35:55.589447 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" event={"ID":"af5fe853-df4f-4d18-b1e0-0a476eef7e4a","Type":"ContainerStarted","Data":"348c9654d8c512314cebb83a0354b33916ec1412a8a8d5ded49dca6846ef8eb3"} Sep 30 00:35:55 crc kubenswrapper[4922]: I0930 00:35:55.590927 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" event={"ID":"af5fe853-df4f-4d18-b1e0-0a476eef7e4a","Type":"ContainerStarted","Data":"a1afed7cb133328ed6524282aabbd6efb933b6ec99d67527a2596499f7cd246d"} Sep 30 00:35:55 crc kubenswrapper[4922]: I0930 00:35:55.612945 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" podStartSLOduration=2.408122673 podStartE2EDuration="2.612925466s" podCreationTimestamp="2025-09-30 00:35:53 +0000 UTC" firstStartedPulling="2025-09-30 00:35:54.583726073 +0000 UTC m=+7758.894014886" lastFinishedPulling="2025-09-30 00:35:54.788528866 +0000 UTC m=+7759.098817679" observedRunningTime="2025-09-30 00:35:55.605080832 +0000 UTC m=+7759.915369645" watchObservedRunningTime="2025-09-30 00:35:55.612925466 +0000 UTC m=+7759.923214279" Sep 30 00:36:10 crc kubenswrapper[4922]: I0930 00:36:10.824716 4922 generic.go:334] "Generic (PLEG): container finished" podID="af5fe853-df4f-4d18-b1e0-0a476eef7e4a" containerID="348c9654d8c512314cebb83a0354b33916ec1412a8a8d5ded49dca6846ef8eb3" exitCode=0 Sep 30 00:36:10 crc kubenswrapper[4922]: I0930 00:36:10.825266 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" event={"ID":"af5fe853-df4f-4d18-b1e0-0a476eef7e4a","Type":"ContainerDied","Data":"348c9654d8c512314cebb83a0354b33916ec1412a8a8d5ded49dca6846ef8eb3"} Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.407084 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.536703 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl6fl\" (UniqueName: \"kubernetes.io/projected/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-kube-api-access-nl6fl\") pod \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.536827 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ceph\") pod \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.537094 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-inventory\") pod \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.537162 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ssh-key\") pod \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\" (UID: \"af5fe853-df4f-4d18-b1e0-0a476eef7e4a\") " Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.543112 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-kube-api-access-nl6fl" (OuterVolumeSpecName: "kube-api-access-nl6fl") pod "af5fe853-df4f-4d18-b1e0-0a476eef7e4a" (UID: "af5fe853-df4f-4d18-b1e0-0a476eef7e4a"). InnerVolumeSpecName "kube-api-access-nl6fl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.543615 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ceph" (OuterVolumeSpecName: "ceph") pod "af5fe853-df4f-4d18-b1e0-0a476eef7e4a" (UID: "af5fe853-df4f-4d18-b1e0-0a476eef7e4a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.578021 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af5fe853-df4f-4d18-b1e0-0a476eef7e4a" (UID: "af5fe853-df4f-4d18-b1e0-0a476eef7e4a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.582364 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-inventory" (OuterVolumeSpecName: "inventory") pod "af5fe853-df4f-4d18-b1e0-0a476eef7e4a" (UID: "af5fe853-df4f-4d18-b1e0-0a476eef7e4a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.640448 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl6fl\" (UniqueName: \"kubernetes.io/projected/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-kube-api-access-nl6fl\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.640476 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.640485 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.640494 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af5fe853-df4f-4d18-b1e0-0a476eef7e4a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.850894 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" event={"ID":"af5fe853-df4f-4d18-b1e0-0a476eef7e4a","Type":"ContainerDied","Data":"a1afed7cb133328ed6524282aabbd6efb933b6ec99d67527a2596499f7cd246d"} Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.851199 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1afed7cb133328ed6524282aabbd6efb933b6ec99d67527a2596499f7cd246d" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.850941 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-vwq4t" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.940678 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-pwkzf"] Sep 30 00:36:12 crc kubenswrapper[4922]: E0930 00:36:12.941217 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af5fe853-df4f-4d18-b1e0-0a476eef7e4a" containerName="reboot-os-openstack-openstack-cell1" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.941240 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="af5fe853-df4f-4d18-b1e0-0a476eef7e4a" containerName="reboot-os-openstack-openstack-cell1" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.941558 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="af5fe853-df4f-4d18-b1e0-0a476eef7e4a" containerName="reboot-os-openstack-openstack-cell1" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.942495 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.950618 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.950751 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.950841 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.950960 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-pwkzf"] Sep 30 00:36:12 crc kubenswrapper[4922]: I0930 00:36:12.951131 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049608 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049710 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ssh-key\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049753 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ceph\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049807 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049833 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049876 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-telemetry-combined-ca-bundle\") pod 
\"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049944 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8g7g\" (UniqueName: \"kubernetes.io/projected/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-kube-api-access-r8g7g\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.049980 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.050021 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-inventory\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.050052 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.050113 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.050154 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.152696 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.152848 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.152931 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ssh-key\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.152979 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ceph\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153031 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153061 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153100 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153167 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8g7g\" (UniqueName: \"kubernetes.io/projected/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-kube-api-access-r8g7g\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153206 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153258 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-inventory\") 
pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153292 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.153367 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.160295 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-inventory\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.161204 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ssh-key\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.161233 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.162310 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.162459 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.163770 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " 
pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.164286 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ceph\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.164456 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.165715 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.165922 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.171178 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.174986 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8g7g\" (UniqueName: \"kubernetes.io/projected/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-kube-api-access-r8g7g\") pod \"install-certs-openstack-openstack-cell1-pwkzf\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.276148 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:13 crc kubenswrapper[4922]: I0930 00:36:13.875459 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-pwkzf"] Sep 30 00:36:14 crc kubenswrapper[4922]: I0930 00:36:14.880966 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" event={"ID":"94d57e1e-85f4-4e65-9ce3-1fe527634f3a","Type":"ContainerStarted","Data":"de3e06fbce15f338965a9cdde1a039395837fc774a044dfe1c6f084e8dd926fa"} Sep 30 00:36:15 crc kubenswrapper[4922]: I0930 00:36:15.890975 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" event={"ID":"94d57e1e-85f4-4e65-9ce3-1fe527634f3a","Type":"ContainerStarted","Data":"508a564d325026f518dde361729c02e5111c8db471c3c4209e1bebc21ca90326"} Sep 30 00:36:15 crc kubenswrapper[4922]: I0930 00:36:15.921086 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" podStartSLOduration=3.201728961 podStartE2EDuration="3.921065654s" podCreationTimestamp="2025-09-30 00:36:12 +0000 UTC" firstStartedPulling="2025-09-30 00:36:13.888973658 +0000 UTC m=+7778.199262471" lastFinishedPulling="2025-09-30 00:36:14.608310331 +0000 UTC m=+7778.918599164" observedRunningTime="2025-09-30 00:36:15.912597805 +0000 UTC m=+7780.222886638" watchObservedRunningTime="2025-09-30 00:36:15.921065654 +0000 UTC m=+7780.231354467" Sep 30 00:36:35 crc kubenswrapper[4922]: I0930 00:36:35.078848 4922 generic.go:334] "Generic (PLEG): container finished" podID="94d57e1e-85f4-4e65-9ce3-1fe527634f3a" containerID="508a564d325026f518dde361729c02e5111c8db471c3c4209e1bebc21ca90326" exitCode=0 Sep 30 00:36:35 crc kubenswrapper[4922]: I0930 00:36:35.078943 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" event={"ID":"94d57e1e-85f4-4e65-9ce3-1fe527634f3a","Type":"ContainerDied","Data":"508a564d325026f518dde361729c02e5111c8db471c3c4209e1bebc21ca90326"} Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.539411 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.610838 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-bootstrap-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.610884 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-sriov-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.610908 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-dhcp-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.610998 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ceph\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611018 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-inventory\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611067 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8g7g\" (UniqueName: \"kubernetes.io/projected/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-kube-api-access-r8g7g\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611112 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-nova-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611142 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-libvirt-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611168 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ssh-key\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611237 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ovn-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611305 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-telemetry-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.611336 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-metadata-combined-ca-bundle\") pod \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\" (UID: \"94d57e1e-85f4-4e65-9ce3-1fe527634f3a\") " Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.619849 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.620056 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.629633 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.631374 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.631527 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.631588 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.631596 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.631947 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-kube-api-access-r8g7g" (OuterVolumeSpecName: "kube-api-access-r8g7g") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "kube-api-access-r8g7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.635650 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ceph" (OuterVolumeSpecName: "ceph") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.641572 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.651677 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.655619 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-inventory" (OuterVolumeSpecName: "inventory") pod "94d57e1e-85f4-4e65-9ce3-1fe527634f3a" (UID: "94d57e1e-85f4-4e65-9ce3-1fe527634f3a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714418 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714467 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714482 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8g7g\" (UniqueName: \"kubernetes.io/projected/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-kube-api-access-r8g7g\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714496 4922 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714509 4922 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714520 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714530 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714540 4922 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714551 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714567 4922 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714581 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:36 crc kubenswrapper[4922]: I0930 00:36:36.714595 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d57e1e-85f4-4e65-9ce3-1fe527634f3a-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.108464 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" event={"ID":"94d57e1e-85f4-4e65-9ce3-1fe527634f3a","Type":"ContainerDied","Data":"de3e06fbce15f338965a9cdde1a039395837fc774a044dfe1c6f084e8dd926fa"} Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.108534 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de3e06fbce15f338965a9cdde1a039395837fc774a044dfe1c6f084e8dd926fa" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.108642 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-pwkzf" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.193690 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-2sm7q"] Sep 30 00:36:37 crc kubenswrapper[4922]: E0930 00:36:37.194264 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94d57e1e-85f4-4e65-9ce3-1fe527634f3a" containerName="install-certs-openstack-openstack-cell1" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.194290 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="94d57e1e-85f4-4e65-9ce3-1fe527634f3a" containerName="install-certs-openstack-openstack-cell1" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.194705 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="94d57e1e-85f4-4e65-9ce3-1fe527634f3a" containerName="install-certs-openstack-openstack-cell1" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.195746 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.197763 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.197825 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.198211 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.199182 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.202587 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-2sm7q"] Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.328744 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-inventory\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.328809 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl9zn\" (UniqueName: \"kubernetes.io/projected/92b1643b-fc8f-4422-bd01-01ec4dcfa718-kube-api-access-pl9zn\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.329093 4922 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ceph\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.329430 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.430670 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ceph\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.431053 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.431167 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-inventory\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.431270 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl9zn\" (UniqueName: \"kubernetes.io/projected/92b1643b-fc8f-4422-bd01-01ec4dcfa718-kube-api-access-pl9zn\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.436048 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.436961 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-inventory\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.437022 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ceph\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " 
pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.446937 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl9zn\" (UniqueName: \"kubernetes.io/projected/92b1643b-fc8f-4422-bd01-01ec4dcfa718-kube-api-access-pl9zn\") pod \"ceph-client-openstack-openstack-cell1-2sm7q\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:37 crc kubenswrapper[4922]: I0930 00:36:37.523218 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:38 crc kubenswrapper[4922]: I0930 00:36:38.149561 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-2sm7q"] Sep 30 00:36:39 crc kubenswrapper[4922]: I0930 00:36:39.138078 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" event={"ID":"92b1643b-fc8f-4422-bd01-01ec4dcfa718","Type":"ContainerStarted","Data":"4b1d6bf9498cc163ccf387b0bc9fdc9127a9faa766d89337909bcc9af64eab16"} Sep 30 00:36:39 crc kubenswrapper[4922]: I0930 00:36:39.138881 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" event={"ID":"92b1643b-fc8f-4422-bd01-01ec4dcfa718","Type":"ContainerStarted","Data":"b4a5f3d6031fccf7d1c41b6749f76dc6a67a51bcebb2cce5939d3aa23ae7122d"} Sep 30 00:36:39 crc kubenswrapper[4922]: I0930 00:36:39.175869 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" podStartSLOduration=2.004389857 podStartE2EDuration="2.175843105s" podCreationTimestamp="2025-09-30 00:36:37 +0000 UTC" firstStartedPulling="2025-09-30 00:36:38.153179894 +0000 UTC m=+7802.463468707" lastFinishedPulling="2025-09-30 00:36:38.324633142 +0000 UTC m=+7802.634921955" observedRunningTime="2025-09-30 00:36:39.160007914 +0000 UTC m=+7803.470296757" watchObservedRunningTime="2025-09-30 00:36:39.175843105 +0000 UTC m=+7803.486131948" Sep 30 00:36:44 crc kubenswrapper[4922]: I0930 00:36:44.196080 4922 generic.go:334] "Generic (PLEG): container finished" podID="92b1643b-fc8f-4422-bd01-01ec4dcfa718" containerID="4b1d6bf9498cc163ccf387b0bc9fdc9127a9faa766d89337909bcc9af64eab16" exitCode=0 Sep 30 00:36:44 crc kubenswrapper[4922]: I0930 00:36:44.196180 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" event={"ID":"92b1643b-fc8f-4422-bd01-01ec4dcfa718","Type":"ContainerDied","Data":"4b1d6bf9498cc163ccf387b0bc9fdc9127a9faa766d89337909bcc9af64eab16"} Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.664239 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.723584 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ssh-key\") pod \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.723756 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ceph\") pod \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.723790 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl9zn\" (UniqueName: \"kubernetes.io/projected/92b1643b-fc8f-4422-bd01-01ec4dcfa718-kube-api-access-pl9zn\") pod \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.723816 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-inventory\") pod \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\" (UID: \"92b1643b-fc8f-4422-bd01-01ec4dcfa718\") " Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.729227 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ceph" (OuterVolumeSpecName: "ceph") pod "92b1643b-fc8f-4422-bd01-01ec4dcfa718" (UID: "92b1643b-fc8f-4422-bd01-01ec4dcfa718"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.729538 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92b1643b-fc8f-4422-bd01-01ec4dcfa718-kube-api-access-pl9zn" (OuterVolumeSpecName: "kube-api-access-pl9zn") pod "92b1643b-fc8f-4422-bd01-01ec4dcfa718" (UID: "92b1643b-fc8f-4422-bd01-01ec4dcfa718"). InnerVolumeSpecName "kube-api-access-pl9zn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.752644 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-inventory" (OuterVolumeSpecName: "inventory") pod "92b1643b-fc8f-4422-bd01-01ec4dcfa718" (UID: "92b1643b-fc8f-4422-bd01-01ec4dcfa718"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.753334 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "92b1643b-fc8f-4422-bd01-01ec4dcfa718" (UID: "92b1643b-fc8f-4422-bd01-01ec4dcfa718"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.828007 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.828046 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.828058 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl9zn\" (UniqueName: \"kubernetes.io/projected/92b1643b-fc8f-4422-bd01-01ec4dcfa718-kube-api-access-pl9zn\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:45 crc kubenswrapper[4922]: I0930 00:36:45.828070 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/92b1643b-fc8f-4422-bd01-01ec4dcfa718-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.221096 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" event={"ID":"92b1643b-fc8f-4422-bd01-01ec4dcfa718","Type":"ContainerDied","Data":"b4a5f3d6031fccf7d1c41b6749f76dc6a67a51bcebb2cce5939d3aa23ae7122d"} Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.221820 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4a5f3d6031fccf7d1c41b6749f76dc6a67a51bcebb2cce5939d3aa23ae7122d" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.221482 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-2sm7q" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.338697 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-f4zxv"] Sep 30 00:36:46 crc kubenswrapper[4922]: E0930 00:36:46.339133 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92b1643b-fc8f-4422-bd01-01ec4dcfa718" containerName="ceph-client-openstack-openstack-cell1" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.339149 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="92b1643b-fc8f-4422-bd01-01ec4dcfa718" containerName="ceph-client-openstack-openstack-cell1" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.339360 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="92b1643b-fc8f-4422-bd01-01ec4dcfa718" containerName="ceph-client-openstack-openstack-cell1" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.340161 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.349543 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-f4zxv"] Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.356925 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.357639 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.358775 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.358911 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.358991 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.444998 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ceph\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.445248 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ssh-key\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.445480 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.445612 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/41047bf4-d616-4327-b2b7-edd10324c5f4-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.445741 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-inventory\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.445782 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqc5q\" (UniqueName: \"kubernetes.io/projected/41047bf4-d616-4327-b2b7-edd10324c5f4-kube-api-access-mqc5q\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: 
\"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.548045 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.548535 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/41047bf4-d616-4327-b2b7-edd10324c5f4-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.548673 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-inventory\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.548704 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqc5q\" (UniqueName: \"kubernetes.io/projected/41047bf4-d616-4327-b2b7-edd10324c5f4-kube-api-access-mqc5q\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.548766 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ceph\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.548877 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ssh-key\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.550457 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/41047bf4-d616-4327-b2b7-edd10324c5f4-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.552808 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ceph\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.552956 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ssh-key\") pod 
\"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.553198 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-inventory\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.558277 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.579373 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqc5q\" (UniqueName: \"kubernetes.io/projected/41047bf4-d616-4327-b2b7-edd10324c5f4-kube-api-access-mqc5q\") pod \"ovn-openstack-openstack-cell1-f4zxv\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:46 crc kubenswrapper[4922]: I0930 00:36:46.684218 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:36:47 crc kubenswrapper[4922]: I0930 00:36:47.292322 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-f4zxv"] Sep 30 00:36:48 crc kubenswrapper[4922]: I0930 00:36:48.246634 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" event={"ID":"41047bf4-d616-4327-b2b7-edd10324c5f4","Type":"ContainerStarted","Data":"f78568f85f8c067b7559c89bff107a7d6bfab1b0037b7a84257adb0a071cfd15"} Sep 30 00:36:48 crc kubenswrapper[4922]: I0930 00:36:48.247256 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" event={"ID":"41047bf4-d616-4327-b2b7-edd10324c5f4","Type":"ContainerStarted","Data":"23f7fd0578d7b0b2f355fb2a2a7eef32f47502126e75bc92aaabd4817dec7ea8"} Sep 30 00:36:48 crc kubenswrapper[4922]: I0930 00:36:48.273549 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" podStartSLOduration=2.147671928 podStartE2EDuration="2.273529789s" podCreationTimestamp="2025-09-30 00:36:46 +0000 UTC" firstStartedPulling="2025-09-30 00:36:47.301732426 +0000 UTC m=+7811.612021239" lastFinishedPulling="2025-09-30 00:36:47.427590267 +0000 UTC m=+7811.737879100" observedRunningTime="2025-09-30 00:36:48.269448358 +0000 UTC m=+7812.579737181" watchObservedRunningTime="2025-09-30 00:36:48.273529789 +0000 UTC m=+7812.583818602" Sep 30 00:37:18 crc kubenswrapper[4922]: I0930 00:37:18.781409 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rjlqc"] Sep 30 00:37:18 crc kubenswrapper[4922]: I0930 00:37:18.787167 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:18 crc kubenswrapper[4922]: I0930 00:37:18.799197 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rjlqc"] Sep 30 00:37:18 crc kubenswrapper[4922]: I0930 00:37:18.911987 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-catalog-content\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:18 crc kubenswrapper[4922]: I0930 00:37:18.912135 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-utilities\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:18 crc kubenswrapper[4922]: I0930 00:37:18.912196 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x4s5\" (UniqueName: \"kubernetes.io/projected/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-kube-api-access-7x4s5\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.013690 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-utilities\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.013788 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x4s5\" (UniqueName: \"kubernetes.io/projected/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-kube-api-access-7x4s5\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.013878 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-catalog-content\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.014575 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-catalog-content\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.014571 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-utilities\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.051066 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7x4s5\" (UniqueName: \"kubernetes.io/projected/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-kube-api-access-7x4s5\") pod \"redhat-operators-rjlqc\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.125931 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:19 crc kubenswrapper[4922]: I0930 00:37:19.650051 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rjlqc"] Sep 30 00:37:20 crc kubenswrapper[4922]: I0930 00:37:20.599907 4922 generic.go:334] "Generic (PLEG): container finished" podID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerID="53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da" exitCode=0 Sep 30 00:37:20 crc kubenswrapper[4922]: I0930 00:37:20.599983 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjlqc" event={"ID":"09aac2b7-7cd3-4fc7-b075-7c57095ac51f","Type":"ContainerDied","Data":"53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da"} Sep 30 00:37:20 crc kubenswrapper[4922]: I0930 00:37:20.600497 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjlqc" event={"ID":"09aac2b7-7cd3-4fc7-b075-7c57095ac51f","Type":"ContainerStarted","Data":"2d0093e56e07d4660907f4deea430af98086726331842d014a8522b2429f008e"} Sep 30 00:37:20 crc kubenswrapper[4922]: I0930 00:37:20.603073 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:37:22 crc kubenswrapper[4922]: I0930 00:37:22.629899 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjlqc" event={"ID":"09aac2b7-7cd3-4fc7-b075-7c57095ac51f","Type":"ContainerStarted","Data":"3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff"} Sep 30 00:37:30 crc kubenswrapper[4922]: I0930 00:37:30.725006 4922 generic.go:334] "Generic (PLEG): container finished" podID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerID="3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff" exitCode=0 Sep 30 00:37:30 crc kubenswrapper[4922]: I0930 00:37:30.725082 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjlqc" event={"ID":"09aac2b7-7cd3-4fc7-b075-7c57095ac51f","Type":"ContainerDied","Data":"3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff"} Sep 30 00:37:31 crc kubenswrapper[4922]: I0930 00:37:31.737922 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjlqc" event={"ID":"09aac2b7-7cd3-4fc7-b075-7c57095ac51f","Type":"ContainerStarted","Data":"7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15"} Sep 30 00:37:39 crc kubenswrapper[4922]: I0930 00:37:39.126306 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:39 crc kubenswrapper[4922]: I0930 00:37:39.126860 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:40 crc kubenswrapper[4922]: I0930 00:37:40.206888 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rjlqc" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="registry-server" probeResult="failure" output=< Sep 30 
00:37:40 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:37:40 crc kubenswrapper[4922]: > Sep 30 00:37:50 crc kubenswrapper[4922]: I0930 00:37:50.182414 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rjlqc" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="registry-server" probeResult="failure" output=< Sep 30 00:37:50 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:37:50 crc kubenswrapper[4922]: > Sep 30 00:37:57 crc kubenswrapper[4922]: I0930 00:37:57.018178 4922 generic.go:334] "Generic (PLEG): container finished" podID="41047bf4-d616-4327-b2b7-edd10324c5f4" containerID="f78568f85f8c067b7559c89bff107a7d6bfab1b0037b7a84257adb0a071cfd15" exitCode=0 Sep 30 00:37:57 crc kubenswrapper[4922]: I0930 00:37:57.018286 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" event={"ID":"41047bf4-d616-4327-b2b7-edd10324c5f4","Type":"ContainerDied","Data":"f78568f85f8c067b7559c89bff107a7d6bfab1b0037b7a84257adb0a071cfd15"} Sep 30 00:37:57 crc kubenswrapper[4922]: I0930 00:37:57.043758 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rjlqc" podStartSLOduration=28.428497201 podStartE2EDuration="39.04373409s" podCreationTimestamp="2025-09-30 00:37:18 +0000 UTC" firstStartedPulling="2025-09-30 00:37:20.602804452 +0000 UTC m=+7844.913093265" lastFinishedPulling="2025-09-30 00:37:31.218041341 +0000 UTC m=+7855.528330154" observedRunningTime="2025-09-30 00:37:31.767945626 +0000 UTC m=+7856.078234439" watchObservedRunningTime="2025-09-30 00:37:57.04373409 +0000 UTC m=+7881.354022913" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.520598 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.609216 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/41047bf4-d616-4327-b2b7-edd10324c5f4-ovncontroller-config-0\") pod \"41047bf4-d616-4327-b2b7-edd10324c5f4\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.609467 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ceph\") pod \"41047bf4-d616-4327-b2b7-edd10324c5f4\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.609540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-inventory\") pod \"41047bf4-d616-4327-b2b7-edd10324c5f4\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.609690 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqc5q\" (UniqueName: \"kubernetes.io/projected/41047bf4-d616-4327-b2b7-edd10324c5f4-kube-api-access-mqc5q\") pod \"41047bf4-d616-4327-b2b7-edd10324c5f4\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.609845 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ovn-combined-ca-bundle\") pod \"41047bf4-d616-4327-b2b7-edd10324c5f4\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.610050 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ssh-key\") pod \"41047bf4-d616-4327-b2b7-edd10324c5f4\" (UID: \"41047bf4-d616-4327-b2b7-edd10324c5f4\") " Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.615814 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ceph" (OuterVolumeSpecName: "ceph") pod "41047bf4-d616-4327-b2b7-edd10324c5f4" (UID: "41047bf4-d616-4327-b2b7-edd10324c5f4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.616510 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "41047bf4-d616-4327-b2b7-edd10324c5f4" (UID: "41047bf4-d616-4327-b2b7-edd10324c5f4"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.619672 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41047bf4-d616-4327-b2b7-edd10324c5f4-kube-api-access-mqc5q" (OuterVolumeSpecName: "kube-api-access-mqc5q") pod "41047bf4-d616-4327-b2b7-edd10324c5f4" (UID: "41047bf4-d616-4327-b2b7-edd10324c5f4"). InnerVolumeSpecName "kube-api-access-mqc5q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.642996 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41047bf4-d616-4327-b2b7-edd10324c5f4-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "41047bf4-d616-4327-b2b7-edd10324c5f4" (UID: "41047bf4-d616-4327-b2b7-edd10324c5f4"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.644603 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-inventory" (OuterVolumeSpecName: "inventory") pod "41047bf4-d616-4327-b2b7-edd10324c5f4" (UID: "41047bf4-d616-4327-b2b7-edd10324c5f4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.673236 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "41047bf4-d616-4327-b2b7-edd10324c5f4" (UID: "41047bf4-d616-4327-b2b7-edd10324c5f4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.714276 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.714336 4922 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/41047bf4-d616-4327-b2b7-edd10324c5f4-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.714352 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.714368 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.714384 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqc5q\" (UniqueName: \"kubernetes.io/projected/41047bf4-d616-4327-b2b7-edd10324c5f4-kube-api-access-mqc5q\") on node \"crc\" DevicePath \"\"" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.714417 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41047bf4-d616-4327-b2b7-edd10324c5f4-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.912312 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:37:58 crc kubenswrapper[4922]: I0930 00:37:58.912371 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.057074 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" event={"ID":"41047bf4-d616-4327-b2b7-edd10324c5f4","Type":"ContainerDied","Data":"23f7fd0578d7b0b2f355fb2a2a7eef32f47502126e75bc92aaabd4817dec7ea8"} Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.057116 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23f7fd0578d7b0b2f355fb2a2a7eef32f47502126e75bc92aaabd4817dec7ea8" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.057187 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-f4zxv" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.133128 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-4mf5z"] Sep 30 00:37:59 crc kubenswrapper[4922]: E0930 00:37:59.133892 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41047bf4-d616-4327-b2b7-edd10324c5f4" containerName="ovn-openstack-openstack-cell1" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.133920 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="41047bf4-d616-4327-b2b7-edd10324c5f4" containerName="ovn-openstack-openstack-cell1" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.134434 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="41047bf4-d616-4327-b2b7-edd10324c5f4" containerName="ovn-openstack-openstack-cell1" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.135697 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.139369 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.139734 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.140607 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.140861 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.142857 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.143204 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.145669 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-4mf5z"] Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.222569 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.224612 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.224710 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.224819 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.224873 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.224932 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" 
(UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.225077 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.225178 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qc2p\" (UniqueName: \"kubernetes.io/projected/1ecf144b-092f-40a8-a132-f212de350a0e-kube-api-access-4qc2p\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.277262 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.328044 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.328179 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qc2p\" (UniqueName: \"kubernetes.io/projected/1ecf144b-092f-40a8-a132-f212de350a0e-kube-api-access-4qc2p\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.328504 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.328635 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.328680 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: 
I0930 00:37:59.328710 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.328768 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.332260 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.332493 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.332871 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.333021 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.333056 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.334108 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 
00:37:59.348591 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qc2p\" (UniqueName: \"kubernetes.io/projected/1ecf144b-092f-40a8-a132-f212de350a0e-kube-api-access-4qc2p\") pod \"neutron-metadata-openstack-openstack-cell1-4mf5z\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.461505 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rjlqc"] Sep 30 00:37:59 crc kubenswrapper[4922]: I0930 00:37:59.476066 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:38:00 crc kubenswrapper[4922]: I0930 00:38:00.088872 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-4mf5z"] Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.084880 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" event={"ID":"1ecf144b-092f-40a8-a132-f212de350a0e","Type":"ContainerStarted","Data":"078b0e0505cea9346a102c53371235087455e28477168390e1935285e4a38d55"} Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.085195 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" event={"ID":"1ecf144b-092f-40a8-a132-f212de350a0e","Type":"ContainerStarted","Data":"e6b5d94d15407d52629d5da072a25e3ce74aa9d46b6f29cc3b02eb3af28afcce"} Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.085045 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rjlqc" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="registry-server" containerID="cri-o://7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15" gracePeriod=2 Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.117036 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" podStartSLOduration=1.897575061 podStartE2EDuration="2.117011356s" podCreationTimestamp="2025-09-30 00:37:59 +0000 UTC" firstStartedPulling="2025-09-30 00:38:00.092617631 +0000 UTC m=+7884.402906454" lastFinishedPulling="2025-09-30 00:38:00.312053936 +0000 UTC m=+7884.622342749" observedRunningTime="2025-09-30 00:38:01.107379497 +0000 UTC m=+7885.417668350" watchObservedRunningTime="2025-09-30 00:38:01.117011356 +0000 UTC m=+7885.427300209" Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.646156 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.787047 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x4s5\" (UniqueName: \"kubernetes.io/projected/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-kube-api-access-7x4s5\") pod \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.787324 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-utilities\") pod \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.787360 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-catalog-content\") pod \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\" (UID: \"09aac2b7-7cd3-4fc7-b075-7c57095ac51f\") " Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.788049 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-utilities" (OuterVolumeSpecName: "utilities") pod "09aac2b7-7cd3-4fc7-b075-7c57095ac51f" (UID: "09aac2b7-7cd3-4fc7-b075-7c57095ac51f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.792519 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-kube-api-access-7x4s5" (OuterVolumeSpecName: "kube-api-access-7x4s5") pod "09aac2b7-7cd3-4fc7-b075-7c57095ac51f" (UID: "09aac2b7-7cd3-4fc7-b075-7c57095ac51f"). InnerVolumeSpecName "kube-api-access-7x4s5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.875572 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09aac2b7-7cd3-4fc7-b075-7c57095ac51f" (UID: "09aac2b7-7cd3-4fc7-b075-7c57095ac51f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.890638 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x4s5\" (UniqueName: \"kubernetes.io/projected/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-kube-api-access-7x4s5\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.890680 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:01 crc kubenswrapper[4922]: I0930 00:38:01.890696 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09aac2b7-7cd3-4fc7-b075-7c57095ac51f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.098136 4922 generic.go:334] "Generic (PLEG): container finished" podID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerID="7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15" exitCode=0 Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.099095 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rjlqc" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.101537 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjlqc" event={"ID":"09aac2b7-7cd3-4fc7-b075-7c57095ac51f","Type":"ContainerDied","Data":"7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15"} Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.101622 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rjlqc" event={"ID":"09aac2b7-7cd3-4fc7-b075-7c57095ac51f","Type":"ContainerDied","Data":"2d0093e56e07d4660907f4deea430af98086726331842d014a8522b2429f008e"} Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.101670 4922 scope.go:117] "RemoveContainer" containerID="7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.138159 4922 scope.go:117] "RemoveContainer" containerID="3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.157302 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rjlqc"] Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.172989 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rjlqc"] Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.193983 4922 scope.go:117] "RemoveContainer" containerID="53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.234870 4922 scope.go:117] "RemoveContainer" containerID="7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15" Sep 30 00:38:02 crc kubenswrapper[4922]: E0930 00:38:02.235721 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15\": container with ID starting with 7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15 not found: ID does not exist" containerID="7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.235759 4922 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15"} err="failed to get container status \"7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15\": rpc error: code = NotFound desc = could not find container \"7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15\": container with ID starting with 7062b8a05ece0bd4877a6a036f9cbc8360248fa89d67073372107af0a514aa15 not found: ID does not exist" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.235785 4922 scope.go:117] "RemoveContainer" containerID="3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff" Sep 30 00:38:02 crc kubenswrapper[4922]: E0930 00:38:02.236222 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff\": container with ID starting with 3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff not found: ID does not exist" containerID="3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.236276 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff"} err="failed to get container status \"3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff\": rpc error: code = NotFound desc = could not find container \"3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff\": container with ID starting with 3690f464ccdc857c4b7839f6d19298fea30f6e474eef97302be9af21a98443ff not found: ID does not exist" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.236312 4922 scope.go:117] "RemoveContainer" containerID="53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da" Sep 30 00:38:02 crc kubenswrapper[4922]: E0930 00:38:02.236961 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da\": container with ID starting with 53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da not found: ID does not exist" containerID="53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.236995 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da"} err="failed to get container status \"53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da\": rpc error: code = NotFound desc = could not find container \"53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da\": container with ID starting with 53239948f8be1dd6971ea1264a6bbbcc36c2e83e7c4a6dd3045ddb86d64755da not found: ID does not exist" Sep 30 00:38:02 crc kubenswrapper[4922]: I0930 00:38:02.436212 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" path="/var/lib/kubelet/pods/09aac2b7-7cd3-4fc7-b075-7c57095ac51f/volumes" Sep 30 00:38:28 crc kubenswrapper[4922]: I0930 00:38:28.912889 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:38:28 crc kubenswrapper[4922]: I0930 00:38:28.913797 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:38:55 crc kubenswrapper[4922]: I0930 00:38:55.823654 4922 generic.go:334] "Generic (PLEG): container finished" podID="1ecf144b-092f-40a8-a132-f212de350a0e" containerID="078b0e0505cea9346a102c53371235087455e28477168390e1935285e4a38d55" exitCode=0 Sep 30 00:38:55 crc kubenswrapper[4922]: I0930 00:38:55.823766 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" event={"ID":"1ecf144b-092f-40a8-a132-f212de350a0e","Type":"ContainerDied","Data":"078b0e0505cea9346a102c53371235087455e28477168390e1935285e4a38d55"} Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.313860 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.369201 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ssh-key\") pod \"1ecf144b-092f-40a8-a132-f212de350a0e\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.369364 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-ovn-metadata-agent-neutron-config-0\") pod \"1ecf144b-092f-40a8-a132-f212de350a0e\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.369528 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ceph\") pod \"1ecf144b-092f-40a8-a132-f212de350a0e\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.369620 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-nova-metadata-neutron-config-0\") pod \"1ecf144b-092f-40a8-a132-f212de350a0e\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.370323 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qc2p\" (UniqueName: \"kubernetes.io/projected/1ecf144b-092f-40a8-a132-f212de350a0e-kube-api-access-4qc2p\") pod \"1ecf144b-092f-40a8-a132-f212de350a0e\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.370382 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-metadata-combined-ca-bundle\") pod \"1ecf144b-092f-40a8-a132-f212de350a0e\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.370451 4922 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-inventory\") pod \"1ecf144b-092f-40a8-a132-f212de350a0e\" (UID: \"1ecf144b-092f-40a8-a132-f212de350a0e\") " Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.377519 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ceph" (OuterVolumeSpecName: "ceph") pod "1ecf144b-092f-40a8-a132-f212de350a0e" (UID: "1ecf144b-092f-40a8-a132-f212de350a0e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.378093 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ecf144b-092f-40a8-a132-f212de350a0e-kube-api-access-4qc2p" (OuterVolumeSpecName: "kube-api-access-4qc2p") pod "1ecf144b-092f-40a8-a132-f212de350a0e" (UID: "1ecf144b-092f-40a8-a132-f212de350a0e"). InnerVolumeSpecName "kube-api-access-4qc2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.378891 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "1ecf144b-092f-40a8-a132-f212de350a0e" (UID: "1ecf144b-092f-40a8-a132-f212de350a0e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.428527 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "1ecf144b-092f-40a8-a132-f212de350a0e" (UID: "1ecf144b-092f-40a8-a132-f212de350a0e"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.429953 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "1ecf144b-092f-40a8-a132-f212de350a0e" (UID: "1ecf144b-092f-40a8-a132-f212de350a0e"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.430035 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-inventory" (OuterVolumeSpecName: "inventory") pod "1ecf144b-092f-40a8-a132-f212de350a0e" (UID: "1ecf144b-092f-40a8-a132-f212de350a0e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.433699 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1ecf144b-092f-40a8-a132-f212de350a0e" (UID: "1ecf144b-092f-40a8-a132-f212de350a0e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.474363 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.474546 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.474627 4922 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.474705 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qc2p\" (UniqueName: \"kubernetes.io/projected/1ecf144b-092f-40a8-a132-f212de350a0e-kube-api-access-4qc2p\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.474800 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.474892 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.474971 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ecf144b-092f-40a8-a132-f212de350a0e-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.851882 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" event={"ID":"1ecf144b-092f-40a8-a132-f212de350a0e","Type":"ContainerDied","Data":"e6b5d94d15407d52629d5da072a25e3ce74aa9d46b6f29cc3b02eb3af28afcce"} Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.851958 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6b5d94d15407d52629d5da072a25e3ce74aa9d46b6f29cc3b02eb3af28afcce" Sep 30 00:38:57 crc kubenswrapper[4922]: I0930 00:38:57.851980 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-4mf5z" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.026506 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-ptd8n"] Sep 30 00:38:58 crc kubenswrapper[4922]: E0930 00:38:58.027707 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="extract-utilities" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.027733 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="extract-utilities" Sep 30 00:38:58 crc kubenswrapper[4922]: E0930 00:38:58.027798 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="extract-content" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.027807 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="extract-content" Sep 30 00:38:58 crc kubenswrapper[4922]: E0930 00:38:58.027821 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ecf144b-092f-40a8-a132-f212de350a0e" containerName="neutron-metadata-openstack-openstack-cell1" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.027831 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ecf144b-092f-40a8-a132-f212de350a0e" containerName="neutron-metadata-openstack-openstack-cell1" Sep 30 00:38:58 crc kubenswrapper[4922]: E0930 00:38:58.027843 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="registry-server" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.027851 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="registry-server" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.028154 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ecf144b-092f-40a8-a132-f212de350a0e" containerName="neutron-metadata-openstack-openstack-cell1" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.028204 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="09aac2b7-7cd3-4fc7-b075-7c57095ac51f" containerName="registry-server" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.029430 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.031560 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.031792 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.032029 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.032858 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.032887 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.038716 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-ptd8n"] Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.089180 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.089380 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.089454 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgrtg\" (UniqueName: \"kubernetes.io/projected/5935a421-fbf8-44a7-b65c-fa9bfa84124d-kube-api-access-wgrtg\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.089873 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-inventory\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.089936 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ceph\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.090091 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ssh-key\") pod 
\"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.192088 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-inventory\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.192141 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ceph\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.192197 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ssh-key\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.192234 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.192268 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.192286 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgrtg\" (UniqueName: \"kubernetes.io/projected/5935a421-fbf8-44a7-b65c-fa9bfa84124d-kube-api-access-wgrtg\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.197548 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ceph\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.197617 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ssh-key\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.197895 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: 
\"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.198193 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-inventory\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.198261 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.215475 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgrtg\" (UniqueName: \"kubernetes.io/projected/5935a421-fbf8-44a7-b65c-fa9bfa84124d-kube-api-access-wgrtg\") pod \"libvirt-openstack-openstack-cell1-ptd8n\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.368829 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.913205 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.914082 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.914135 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.915117 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"701d764a4a96ad9e037fd51fa1aa45fdf94245706ac842becc5cb33c81251bc3"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.915180 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://701d764a4a96ad9e037fd51fa1aa45fdf94245706ac842becc5cb33c81251bc3" gracePeriod=600 Sep 30 00:38:58 crc kubenswrapper[4922]: I0930 00:38:58.990191 4922 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/libvirt-openstack-openstack-cell1-ptd8n"] Sep 30 00:38:59 crc kubenswrapper[4922]: I0930 00:38:59.885871 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="701d764a4a96ad9e037fd51fa1aa45fdf94245706ac842becc5cb33c81251bc3" exitCode=0 Sep 30 00:38:59 crc kubenswrapper[4922]: I0930 00:38:59.885958 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"701d764a4a96ad9e037fd51fa1aa45fdf94245706ac842becc5cb33c81251bc3"} Sep 30 00:38:59 crc kubenswrapper[4922]: I0930 00:38:59.886764 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1"} Sep 30 00:38:59 crc kubenswrapper[4922]: I0930 00:38:59.886797 4922 scope.go:117] "RemoveContainer" containerID="f7cdce018b483f567413a5eba219657c63e3ea3d87da1df9fb2adfb7bf7e1ae6" Sep 30 00:38:59 crc kubenswrapper[4922]: I0930 00:38:59.890152 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" event={"ID":"5935a421-fbf8-44a7-b65c-fa9bfa84124d","Type":"ContainerStarted","Data":"60e57c972b1ba219456e51bfb43c399a72e723bb961e6dcffbca700723eea44e"} Sep 30 00:38:59 crc kubenswrapper[4922]: I0930 00:38:59.890645 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" event={"ID":"5935a421-fbf8-44a7-b65c-fa9bfa84124d","Type":"ContainerStarted","Data":"4e8a4f1deea46758c26237d7ab26b12ea0f3c646b7f5817555031124ea790bf6"} Sep 30 00:38:59 crc kubenswrapper[4922]: I0930 00:38:59.948492 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" podStartSLOduration=2.736744726 podStartE2EDuration="2.948472791s" podCreationTimestamp="2025-09-30 00:38:57 +0000 UTC" firstStartedPulling="2025-09-30 00:38:58.991904463 +0000 UTC m=+7943.302193286" lastFinishedPulling="2025-09-30 00:38:59.203632538 +0000 UTC m=+7943.513921351" observedRunningTime="2025-09-30 00:38:59.942950965 +0000 UTC m=+7944.253239788" watchObservedRunningTime="2025-09-30 00:38:59.948472791 +0000 UTC m=+7944.258761614" Sep 30 00:41:28 crc kubenswrapper[4922]: I0930 00:41:28.912548 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:41:28 crc kubenswrapper[4922]: I0930 00:41:28.913286 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:41:58 crc kubenswrapper[4922]: I0930 00:41:58.912557 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Sep 30 00:41:58 crc kubenswrapper[4922]: I0930 00:41:58.913262 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.800526 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gs75s"] Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.804362 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.830970 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gs75s"] Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.912722 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.912793 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.912846 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.913812 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.913870 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" gracePeriod=600 Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.990450 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2m6l\" (UniqueName: \"kubernetes.io/projected/e71331d1-a672-44b2-82b7-d1ff417c263d-kube-api-access-r2m6l\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.990870 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-utilities\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " 
pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:28 crc kubenswrapper[4922]: I0930 00:42:28.990927 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-catalog-content\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: E0930 00:42:29.031854 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.092401 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2m6l\" (UniqueName: \"kubernetes.io/projected/e71331d1-a672-44b2-82b7-d1ff417c263d-kube-api-access-r2m6l\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.092587 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-utilities\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.092662 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-catalog-content\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.094546 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-utilities\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.094817 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-catalog-content\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.114172 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2m6l\" (UniqueName: \"kubernetes.io/projected/e71331d1-a672-44b2-82b7-d1ff417c263d-kube-api-access-r2m6l\") pod \"certified-operators-gs75s\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.143881 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.513946 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" exitCode=0 Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.514030 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1"} Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.514235 4922 scope.go:117] "RemoveContainer" containerID="701d764a4a96ad9e037fd51fa1aa45fdf94245706ac842becc5cb33c81251bc3" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.514911 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:42:29 crc kubenswrapper[4922]: E0930 00:42:29.515157 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:42:29 crc kubenswrapper[4922]: I0930 00:42:29.657358 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gs75s"] Sep 30 00:42:30 crc kubenswrapper[4922]: I0930 00:42:30.536572 4922 generic.go:334] "Generic (PLEG): container finished" podID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerID="c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125" exitCode=0 Sep 30 00:42:30 crc kubenswrapper[4922]: I0930 00:42:30.536674 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gs75s" event={"ID":"e71331d1-a672-44b2-82b7-d1ff417c263d","Type":"ContainerDied","Data":"c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125"} Sep 30 00:42:30 crc kubenswrapper[4922]: I0930 00:42:30.537162 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gs75s" event={"ID":"e71331d1-a672-44b2-82b7-d1ff417c263d","Type":"ContainerStarted","Data":"0d2f4f50b23c7d88ee7655984eea21776a26fe3d7ee93fab3c500eaf646dc6af"} Sep 30 00:42:30 crc kubenswrapper[4922]: I0930 00:42:30.541880 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.190131 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g2fzt"] Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.192876 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.204310 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g2fzt"] Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.255303 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-catalog-content\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.255650 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlk8k\" (UniqueName: \"kubernetes.io/projected/d815f75c-d6c2-43c3-918b-a838b7288e10-kube-api-access-jlk8k\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.255944 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-utilities\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.358067 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlk8k\" (UniqueName: \"kubernetes.io/projected/d815f75c-d6c2-43c3-918b-a838b7288e10-kube-api-access-jlk8k\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.358570 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-utilities\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.358778 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-catalog-content\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.359133 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-utilities\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.359344 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-catalog-content\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.383727 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jlk8k\" (UniqueName: \"kubernetes.io/projected/d815f75c-d6c2-43c3-918b-a838b7288e10-kube-api-access-jlk8k\") pod \"redhat-marketplace-g2fzt\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.520284 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.552718 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gs75s" event={"ID":"e71331d1-a672-44b2-82b7-d1ff417c263d","Type":"ContainerStarted","Data":"7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424"} Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.792441 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fknks"] Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.796605 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.807329 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fknks"] Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.871605 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n4pc\" (UniqueName: \"kubernetes.io/projected/9a7c6c59-9a6c-47f5-a914-c741d54371e6-kube-api-access-2n4pc\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.871702 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-utilities\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.871785 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-catalog-content\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.974944 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-catalog-content\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.975065 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n4pc\" (UniqueName: \"kubernetes.io/projected/9a7c6c59-9a6c-47f5-a914-c741d54371e6-kube-api-access-2n4pc\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.975146 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-utilities\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.975284 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g2fzt"] Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.975832 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-utilities\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.975927 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-catalog-content\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:31 crc kubenswrapper[4922]: I0930 00:42:31.993552 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n4pc\" (UniqueName: \"kubernetes.io/projected/9a7c6c59-9a6c-47f5-a914-c741d54371e6-kube-api-access-2n4pc\") pod \"community-operators-fknks\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:32 crc kubenswrapper[4922]: I0930 00:42:32.126332 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:32 crc kubenswrapper[4922]: I0930 00:42:32.572948 4922 generic.go:334] "Generic (PLEG): container finished" podID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerID="4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1" exitCode=0 Sep 30 00:42:32 crc kubenswrapper[4922]: I0930 00:42:32.573080 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g2fzt" event={"ID":"d815f75c-d6c2-43c3-918b-a838b7288e10","Type":"ContainerDied","Data":"4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1"} Sep 30 00:42:32 crc kubenswrapper[4922]: I0930 00:42:32.573734 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g2fzt" event={"ID":"d815f75c-d6c2-43c3-918b-a838b7288e10","Type":"ContainerStarted","Data":"8f651770eef22be3a9e3570fb73487f4e8495561a4b5d7abada2f6f7d0c1ba39"} Sep 30 00:42:32 crc kubenswrapper[4922]: I0930 00:42:32.655926 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fknks"] Sep 30 00:42:33 crc kubenswrapper[4922]: I0930 00:42:33.584332 4922 generic.go:334] "Generic (PLEG): container finished" podID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerID="f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b" exitCode=0 Sep 30 00:42:33 crc kubenswrapper[4922]: I0930 00:42:33.584527 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fknks" event={"ID":"9a7c6c59-9a6c-47f5-a914-c741d54371e6","Type":"ContainerDied","Data":"f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b"} Sep 30 00:42:33 crc kubenswrapper[4922]: I0930 00:42:33.584938 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-fknks" event={"ID":"9a7c6c59-9a6c-47f5-a914-c741d54371e6","Type":"ContainerStarted","Data":"1a3a77744b7d64476b22695a26f2a3bdaa05980632c17d620ecd2a7f3fc7dc44"} Sep 30 00:42:33 crc kubenswrapper[4922]: I0930 00:42:33.591234 4922 generic.go:334] "Generic (PLEG): container finished" podID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerID="7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424" exitCode=0 Sep 30 00:42:33 crc kubenswrapper[4922]: I0930 00:42:33.591281 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gs75s" event={"ID":"e71331d1-a672-44b2-82b7-d1ff417c263d","Type":"ContainerDied","Data":"7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424"} Sep 30 00:42:34 crc kubenswrapper[4922]: I0930 00:42:34.608653 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fknks" event={"ID":"9a7c6c59-9a6c-47f5-a914-c741d54371e6","Type":"ContainerStarted","Data":"3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a"} Sep 30 00:42:34 crc kubenswrapper[4922]: I0930 00:42:34.612477 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gs75s" event={"ID":"e71331d1-a672-44b2-82b7-d1ff417c263d","Type":"ContainerStarted","Data":"918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1"} Sep 30 00:42:34 crc kubenswrapper[4922]: I0930 00:42:34.620174 4922 generic.go:334] "Generic (PLEG): container finished" podID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerID="bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036" exitCode=0 Sep 30 00:42:34 crc kubenswrapper[4922]: I0930 00:42:34.620245 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g2fzt" event={"ID":"d815f75c-d6c2-43c3-918b-a838b7288e10","Type":"ContainerDied","Data":"bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036"} Sep 30 00:42:34 crc kubenswrapper[4922]: I0930 00:42:34.675767 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gs75s" podStartSLOduration=3.223055747 podStartE2EDuration="6.675740641s" podCreationTimestamp="2025-09-30 00:42:28 +0000 UTC" firstStartedPulling="2025-09-30 00:42:30.541689033 +0000 UTC m=+8154.851977846" lastFinishedPulling="2025-09-30 00:42:33.994373907 +0000 UTC m=+8158.304662740" observedRunningTime="2025-09-30 00:42:34.672545742 +0000 UTC m=+8158.982834555" watchObservedRunningTime="2025-09-30 00:42:34.675740641 +0000 UTC m=+8158.986029474" Sep 30 00:42:35 crc kubenswrapper[4922]: I0930 00:42:35.663552 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g2fzt" event={"ID":"d815f75c-d6c2-43c3-918b-a838b7288e10","Type":"ContainerStarted","Data":"6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4"} Sep 30 00:42:35 crc kubenswrapper[4922]: I0930 00:42:35.686142 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g2fzt" podStartSLOduration=2.120014481 podStartE2EDuration="4.686122238s" podCreationTimestamp="2025-09-30 00:42:31 +0000 UTC" firstStartedPulling="2025-09-30 00:42:32.577702145 +0000 UTC m=+8156.887990958" lastFinishedPulling="2025-09-30 00:42:35.143809892 +0000 UTC m=+8159.454098715" observedRunningTime="2025-09-30 00:42:35.685774269 +0000 UTC m=+8159.996063092" watchObservedRunningTime="2025-09-30 
00:42:35.686122238 +0000 UTC m=+8159.996411051" Sep 30 00:42:37 crc kubenswrapper[4922]: I0930 00:42:37.687176 4922 generic.go:334] "Generic (PLEG): container finished" podID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerID="3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a" exitCode=0 Sep 30 00:42:37 crc kubenswrapper[4922]: I0930 00:42:37.687775 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fknks" event={"ID":"9a7c6c59-9a6c-47f5-a914-c741d54371e6","Type":"ContainerDied","Data":"3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a"} Sep 30 00:42:38 crc kubenswrapper[4922]: I0930 00:42:38.701512 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fknks" event={"ID":"9a7c6c59-9a6c-47f5-a914-c741d54371e6","Type":"ContainerStarted","Data":"7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3"} Sep 30 00:42:38 crc kubenswrapper[4922]: I0930 00:42:38.725412 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fknks" podStartSLOduration=3.219479761 podStartE2EDuration="7.725382262s" podCreationTimestamp="2025-09-30 00:42:31 +0000 UTC" firstStartedPulling="2025-09-30 00:42:33.586727139 +0000 UTC m=+8157.897015952" lastFinishedPulling="2025-09-30 00:42:38.09262964 +0000 UTC m=+8162.402918453" observedRunningTime="2025-09-30 00:42:38.723949347 +0000 UTC m=+8163.034238160" watchObservedRunningTime="2025-09-30 00:42:38.725382262 +0000 UTC m=+8163.035671075" Sep 30 00:42:39 crc kubenswrapper[4922]: I0930 00:42:39.144324 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:39 crc kubenswrapper[4922]: I0930 00:42:39.144800 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:39 crc kubenswrapper[4922]: I0930 00:42:39.219663 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:39 crc kubenswrapper[4922]: I0930 00:42:39.787029 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:41 crc kubenswrapper[4922]: I0930 00:42:41.520787 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:41 crc kubenswrapper[4922]: I0930 00:42:41.521087 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:41 crc kubenswrapper[4922]: I0930 00:42:41.608237 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:41 crc kubenswrapper[4922]: I0930 00:42:41.778211 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gs75s"] Sep 30 00:42:41 crc kubenswrapper[4922]: I0930 00:42:41.778729 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gs75s" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="registry-server" containerID="cri-o://918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1" gracePeriod=2 Sep 30 00:42:41 crc kubenswrapper[4922]: I0930 00:42:41.811267 4922 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.126839 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.127824 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.222209 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.401522 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.512002 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-utilities\") pod \"e71331d1-a672-44b2-82b7-d1ff417c263d\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.512226 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2m6l\" (UniqueName: \"kubernetes.io/projected/e71331d1-a672-44b2-82b7-d1ff417c263d-kube-api-access-r2m6l\") pod \"e71331d1-a672-44b2-82b7-d1ff417c263d\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.512338 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-catalog-content\") pod \"e71331d1-a672-44b2-82b7-d1ff417c263d\" (UID: \"e71331d1-a672-44b2-82b7-d1ff417c263d\") " Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.513010 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-utilities" (OuterVolumeSpecName: "utilities") pod "e71331d1-a672-44b2-82b7-d1ff417c263d" (UID: "e71331d1-a672-44b2-82b7-d1ff417c263d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.518606 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e71331d1-a672-44b2-82b7-d1ff417c263d-kube-api-access-r2m6l" (OuterVolumeSpecName: "kube-api-access-r2m6l") pod "e71331d1-a672-44b2-82b7-d1ff417c263d" (UID: "e71331d1-a672-44b2-82b7-d1ff417c263d"). InnerVolumeSpecName "kube-api-access-r2m6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.560360 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e71331d1-a672-44b2-82b7-d1ff417c263d" (UID: "e71331d1-a672-44b2-82b7-d1ff417c263d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.615774 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.615824 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e71331d1-a672-44b2-82b7-d1ff417c263d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.615843 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2m6l\" (UniqueName: \"kubernetes.io/projected/e71331d1-a672-44b2-82b7-d1ff417c263d-kube-api-access-r2m6l\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.743915 4922 generic.go:334] "Generic (PLEG): container finished" podID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerID="918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1" exitCode=0 Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.743991 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gs75s" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.743976 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gs75s" event={"ID":"e71331d1-a672-44b2-82b7-d1ff417c263d","Type":"ContainerDied","Data":"918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1"} Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.744231 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gs75s" event={"ID":"e71331d1-a672-44b2-82b7-d1ff417c263d","Type":"ContainerDied","Data":"0d2f4f50b23c7d88ee7655984eea21776a26fe3d7ee93fab3c500eaf646dc6af"} Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.744274 4922 scope.go:117] "RemoveContainer" containerID="918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.785092 4922 scope.go:117] "RemoveContainer" containerID="7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.799231 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gs75s"] Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.814442 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gs75s"] Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.823953 4922 scope.go:117] "RemoveContainer" containerID="c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.872636 4922 scope.go:117] "RemoveContainer" containerID="918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1" Sep 30 00:42:42 crc kubenswrapper[4922]: E0930 00:42:42.873144 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1\": container with ID starting with 918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1 not found: ID does not exist" containerID="918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.873194 
4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1"} err="failed to get container status \"918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1\": rpc error: code = NotFound desc = could not find container \"918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1\": container with ID starting with 918610f3411bb3a112851302f575854fd7d8b2f90e7b87fc4e1fa8212274f0f1 not found: ID does not exist" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.873225 4922 scope.go:117] "RemoveContainer" containerID="7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424" Sep 30 00:42:42 crc kubenswrapper[4922]: E0930 00:42:42.874081 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424\": container with ID starting with 7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424 not found: ID does not exist" containerID="7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.874145 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424"} err="failed to get container status \"7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424\": rpc error: code = NotFound desc = could not find container \"7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424\": container with ID starting with 7c475e9c8aa61a67305a0048060201f548ed466cdd6e60bfd17a78962a12b424 not found: ID does not exist" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.874178 4922 scope.go:117] "RemoveContainer" containerID="c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125" Sep 30 00:42:42 crc kubenswrapper[4922]: E0930 00:42:42.874554 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125\": container with ID starting with c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125 not found: ID does not exist" containerID="c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125" Sep 30 00:42:42 crc kubenswrapper[4922]: I0930 00:42:42.874622 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125"} err="failed to get container status \"c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125\": rpc error: code = NotFound desc = could not find container \"c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125\": container with ID starting with c973677dde6b2e9c9272ec8af80c3c85cb3e789620fffc7543a7bbed70140125 not found: ID does not exist" Sep 30 00:42:43 crc kubenswrapper[4922]: I0930 00:42:43.830269 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:43 crc kubenswrapper[4922]: I0930 00:42:43.978830 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g2fzt"] Sep 30 00:42:43 crc kubenswrapper[4922]: I0930 00:42:43.979108 4922 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-marketplace-g2fzt" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="registry-server" containerID="cri-o://6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4" gracePeriod=2 Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.422777 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:42:44 crc kubenswrapper[4922]: E0930 00:42:44.423353 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.440029 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" path="/var/lib/kubelet/pods/e71331d1-a672-44b2-82b7-d1ff417c263d/volumes" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.573929 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.664648 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-utilities\") pod \"d815f75c-d6c2-43c3-918b-a838b7288e10\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.664991 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlk8k\" (UniqueName: \"kubernetes.io/projected/d815f75c-d6c2-43c3-918b-a838b7288e10-kube-api-access-jlk8k\") pod \"d815f75c-d6c2-43c3-918b-a838b7288e10\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.665104 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-catalog-content\") pod \"d815f75c-d6c2-43c3-918b-a838b7288e10\" (UID: \"d815f75c-d6c2-43c3-918b-a838b7288e10\") " Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.665789 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-utilities" (OuterVolumeSpecName: "utilities") pod "d815f75c-d6c2-43c3-918b-a838b7288e10" (UID: "d815f75c-d6c2-43c3-918b-a838b7288e10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.670459 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d815f75c-d6c2-43c3-918b-a838b7288e10-kube-api-access-jlk8k" (OuterVolumeSpecName: "kube-api-access-jlk8k") pod "d815f75c-d6c2-43c3-918b-a838b7288e10" (UID: "d815f75c-d6c2-43c3-918b-a838b7288e10"). InnerVolumeSpecName "kube-api-access-jlk8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.679111 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d815f75c-d6c2-43c3-918b-a838b7288e10" (UID: "d815f75c-d6c2-43c3-918b-a838b7288e10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.768969 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.769025 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlk8k\" (UniqueName: \"kubernetes.io/projected/d815f75c-d6c2-43c3-918b-a838b7288e10-kube-api-access-jlk8k\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.769048 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d815f75c-d6c2-43c3-918b-a838b7288e10-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.771819 4922 generic.go:334] "Generic (PLEG): container finished" podID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerID="6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4" exitCode=0 Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.771873 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g2fzt" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.771917 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g2fzt" event={"ID":"d815f75c-d6c2-43c3-918b-a838b7288e10","Type":"ContainerDied","Data":"6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4"} Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.771968 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g2fzt" event={"ID":"d815f75c-d6c2-43c3-918b-a838b7288e10","Type":"ContainerDied","Data":"8f651770eef22be3a9e3570fb73487f4e8495561a4b5d7abada2f6f7d0c1ba39"} Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.771989 4922 scope.go:117] "RemoveContainer" containerID="6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.805007 4922 scope.go:117] "RemoveContainer" containerID="bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.817038 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g2fzt"] Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.830247 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g2fzt"] Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.830631 4922 scope.go:117] "RemoveContainer" containerID="4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.883463 4922 scope.go:117] "RemoveContainer" containerID="6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4" Sep 30 00:42:44 crc kubenswrapper[4922]: E0930 00:42:44.883919 4922 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4\": container with ID starting with 6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4 not found: ID does not exist" containerID="6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.883952 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4"} err="failed to get container status \"6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4\": rpc error: code = NotFound desc = could not find container \"6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4\": container with ID starting with 6f49c667d528a75a8cabc3eb7cf3a9004e005c94992cee8573d3fbb83fb937f4 not found: ID does not exist" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.883974 4922 scope.go:117] "RemoveContainer" containerID="bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036" Sep 30 00:42:44 crc kubenswrapper[4922]: E0930 00:42:44.884202 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036\": container with ID starting with bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036 not found: ID does not exist" containerID="bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.884225 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036"} err="failed to get container status \"bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036\": rpc error: code = NotFound desc = could not find container \"bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036\": container with ID starting with bbfda923a4239c7949744fed0235701a31d584e0c43e2ba8266f7f4276168036 not found: ID does not exist" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.884240 4922 scope.go:117] "RemoveContainer" containerID="4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1" Sep 30 00:42:44 crc kubenswrapper[4922]: E0930 00:42:44.884643 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1\": container with ID starting with 4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1 not found: ID does not exist" containerID="4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1" Sep 30 00:42:44 crc kubenswrapper[4922]: I0930 00:42:44.884668 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1"} err="failed to get container status \"4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1\": rpc error: code = NotFound desc = could not find container \"4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1\": container with ID starting with 4f4dff50848df073a294c5288da8e34a6a56ea760e3c5ee28089ee94d46006d1 not found: ID does not exist" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.184633 4922 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openshift-marketplace/community-operators-fknks"] Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.185326 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fknks" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="registry-server" containerID="cri-o://7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3" gracePeriod=2 Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.440817 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" path="/var/lib/kubelet/pods/d815f75c-d6c2-43c3-918b-a838b7288e10/volumes" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.765277 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.801131 4922 generic.go:334] "Generic (PLEG): container finished" podID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerID="7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3" exitCode=0 Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.801201 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fknks" event={"ID":"9a7c6c59-9a6c-47f5-a914-c741d54371e6","Type":"ContainerDied","Data":"7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3"} Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.801246 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fknks" event={"ID":"9a7c6c59-9a6c-47f5-a914-c741d54371e6","Type":"ContainerDied","Data":"1a3a77744b7d64476b22695a26f2a3bdaa05980632c17d620ecd2a7f3fc7dc44"} Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.801280 4922 scope.go:117] "RemoveContainer" containerID="7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.801530 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fknks" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.833285 4922 scope.go:117] "RemoveContainer" containerID="3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.878658 4922 scope.go:117] "RemoveContainer" containerID="f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.913972 4922 scope.go:117] "RemoveContainer" containerID="7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3" Sep 30 00:42:46 crc kubenswrapper[4922]: E0930 00:42:46.914417 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3\": container with ID starting with 7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3 not found: ID does not exist" containerID="7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.914452 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3"} err="failed to get container status \"7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3\": rpc error: code = NotFound desc = could not find container \"7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3\": container with ID starting with 7f6082a0de60a3ae4d6c4c50d6b2161f84d6de5e4ee70eed1cedfb45763bd5e3 not found: ID does not exist" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.914496 4922 scope.go:117] "RemoveContainer" containerID="3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a" Sep 30 00:42:46 crc kubenswrapper[4922]: E0930 00:42:46.914793 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a\": container with ID starting with 3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a not found: ID does not exist" containerID="3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.914816 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a"} err="failed to get container status \"3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a\": rpc error: code = NotFound desc = could not find container \"3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a\": container with ID starting with 3b287d13b63ec7142662bb96e07c458a1ef2d1c0eea1928b40b88e169aa2339a not found: ID does not exist" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.914830 4922 scope.go:117] "RemoveContainer" containerID="f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b" Sep 30 00:42:46 crc kubenswrapper[4922]: E0930 00:42:46.915288 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b\": container with ID starting with f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b not found: ID does not exist" containerID="f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b" 
Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.915435 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b"} err="failed to get container status \"f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b\": rpc error: code = NotFound desc = could not find container \"f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b\": container with ID starting with f9f50162aadbec2a4efc033b46618a017ed3b80bb9bd82897da05ae92b7c8d8b not found: ID does not exist" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.933504 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-catalog-content\") pod \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.933795 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-utilities\") pod \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.933953 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2n4pc\" (UniqueName: \"kubernetes.io/projected/9a7c6c59-9a6c-47f5-a914-c741d54371e6-kube-api-access-2n4pc\") pod \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\" (UID: \"9a7c6c59-9a6c-47f5-a914-c741d54371e6\") " Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.935759 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-utilities" (OuterVolumeSpecName: "utilities") pod "9a7c6c59-9a6c-47f5-a914-c741d54371e6" (UID: "9a7c6c59-9a6c-47f5-a914-c741d54371e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.942108 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a7c6c59-9a6c-47f5-a914-c741d54371e6-kube-api-access-2n4pc" (OuterVolumeSpecName: "kube-api-access-2n4pc") pod "9a7c6c59-9a6c-47f5-a914-c741d54371e6" (UID: "9a7c6c59-9a6c-47f5-a914-c741d54371e6"). InnerVolumeSpecName "kube-api-access-2n4pc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:42:46 crc kubenswrapper[4922]: I0930 00:42:46.988010 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a7c6c59-9a6c-47f5-a914-c741d54371e6" (UID: "9a7c6c59-9a6c-47f5-a914-c741d54371e6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:42:47 crc kubenswrapper[4922]: I0930 00:42:47.036733 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:47 crc kubenswrapper[4922]: I0930 00:42:47.036775 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2n4pc\" (UniqueName: \"kubernetes.io/projected/9a7c6c59-9a6c-47f5-a914-c741d54371e6-kube-api-access-2n4pc\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:47 crc kubenswrapper[4922]: I0930 00:42:47.036791 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a7c6c59-9a6c-47f5-a914-c741d54371e6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:42:47 crc kubenswrapper[4922]: I0930 00:42:47.137141 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fknks"] Sep 30 00:42:47 crc kubenswrapper[4922]: I0930 00:42:47.149678 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fknks"] Sep 30 00:42:48 crc kubenswrapper[4922]: I0930 00:42:48.441622 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" path="/var/lib/kubelet/pods/9a7c6c59-9a6c-47f5-a914-c741d54371e6/volumes" Sep 30 00:42:58 crc kubenswrapper[4922]: I0930 00:42:58.422127 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:42:58 crc kubenswrapper[4922]: E0930 00:42:58.422861 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:43:09 crc kubenswrapper[4922]: I0930 00:43:09.423065 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:43:09 crc kubenswrapper[4922]: E0930 00:43:09.424421 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:43:23 crc kubenswrapper[4922]: I0930 00:43:23.422724 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:43:23 crc kubenswrapper[4922]: E0930 00:43:23.423724 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:43:38 crc kubenswrapper[4922]: I0930 00:43:38.422184 4922 
scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:43:38 crc kubenswrapper[4922]: E0930 00:43:38.422876 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:43:44 crc kubenswrapper[4922]: I0930 00:43:44.484042 4922 generic.go:334] "Generic (PLEG): container finished" podID="5935a421-fbf8-44a7-b65c-fa9bfa84124d" containerID="60e57c972b1ba219456e51bfb43c399a72e723bb961e6dcffbca700723eea44e" exitCode=0 Sep 30 00:43:44 crc kubenswrapper[4922]: I0930 00:43:44.484648 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" event={"ID":"5935a421-fbf8-44a7-b65c-fa9bfa84124d","Type":"ContainerDied","Data":"60e57c972b1ba219456e51bfb43c399a72e723bb961e6dcffbca700723eea44e"} Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.052228 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.252304 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-combined-ca-bundle\") pod \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.252383 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgrtg\" (UniqueName: \"kubernetes.io/projected/5935a421-fbf8-44a7-b65c-fa9bfa84124d-kube-api-access-wgrtg\") pod \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.252498 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-inventory\") pod \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.252634 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ssh-key\") pod \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.252703 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ceph\") pod \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.252751 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-secret-0\") pod \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\" (UID: \"5935a421-fbf8-44a7-b65c-fa9bfa84124d\") " Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.259923 
4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5935a421-fbf8-44a7-b65c-fa9bfa84124d-kube-api-access-wgrtg" (OuterVolumeSpecName: "kube-api-access-wgrtg") pod "5935a421-fbf8-44a7-b65c-fa9bfa84124d" (UID: "5935a421-fbf8-44a7-b65c-fa9bfa84124d"). InnerVolumeSpecName "kube-api-access-wgrtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.260763 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ceph" (OuterVolumeSpecName: "ceph") pod "5935a421-fbf8-44a7-b65c-fa9bfa84124d" (UID: "5935a421-fbf8-44a7-b65c-fa9bfa84124d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.261691 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "5935a421-fbf8-44a7-b65c-fa9bfa84124d" (UID: "5935a421-fbf8-44a7-b65c-fa9bfa84124d"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.294369 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "5935a421-fbf8-44a7-b65c-fa9bfa84124d" (UID: "5935a421-fbf8-44a7-b65c-fa9bfa84124d"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.304931 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5935a421-fbf8-44a7-b65c-fa9bfa84124d" (UID: "5935a421-fbf8-44a7-b65c-fa9bfa84124d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.318156 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-inventory" (OuterVolumeSpecName: "inventory") pod "5935a421-fbf8-44a7-b65c-fa9bfa84124d" (UID: "5935a421-fbf8-44a7-b65c-fa9bfa84124d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.359125 4922 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.360863 4922 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.361821 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgrtg\" (UniqueName: \"kubernetes.io/projected/5935a421-fbf8-44a7-b65c-fa9bfa84124d-kube-api-access-wgrtg\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.362049 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.362274 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.362610 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5935a421-fbf8-44a7-b65c-fa9bfa84124d-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.513723 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" event={"ID":"5935a421-fbf8-44a7-b65c-fa9bfa84124d","Type":"ContainerDied","Data":"4e8a4f1deea46758c26237d7ab26b12ea0f3c646b7f5817555031124ea790bf6"} Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.513788 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e8a4f1deea46758c26237d7ab26b12ea0f3c646b7f5817555031124ea790bf6" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.514174 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-ptd8n" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.633896 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-fnhsm"] Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634377 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634416 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634438 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="extract-content" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634448 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="extract-content" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634460 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="extract-utilities" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634468 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="extract-utilities" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634482 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="extract-content" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634490 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="extract-content" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634507 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634514 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634527 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="extract-utilities" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634534 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="extract-utilities" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634555 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="extract-utilities" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634563 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="extract-utilities" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634583 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5935a421-fbf8-44a7-b65c-fa9bfa84124d" containerName="libvirt-openstack-openstack-cell1" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634591 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="5935a421-fbf8-44a7-b65c-fa9bfa84124d" containerName="libvirt-openstack-openstack-cell1" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634614 4922 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="extract-content" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634623 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="extract-content" Sep 30 00:43:46 crc kubenswrapper[4922]: E0930 00:43:46.634633 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634640 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634865 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e71331d1-a672-44b2-82b7-d1ff417c263d" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634885 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a7c6c59-9a6c-47f5-a914-c741d54371e6" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634904 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="5935a421-fbf8-44a7-b65c-fa9bfa84124d" containerName="libvirt-openstack-openstack-cell1" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.634919 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d815f75c-d6c2-43c3-918b-a838b7288e10" containerName="registry-server" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.635799 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.638648 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.638897 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.638929 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.640649 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.642906 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.643346 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.643590 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.662672 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-fnhsm"] Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.679462 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " 
pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.679535 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.679684 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.679785 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.679925 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.680045 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.680138 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-inventory\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.680416 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.680586 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: 
\"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.680686 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ceph\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.680745 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlsfp\" (UniqueName: \"kubernetes.io/projected/71bdff05-728b-4401-be44-30fa83148d22-kube-api-access-dlsfp\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782194 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782268 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782308 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ceph\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782336 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlsfp\" (UniqueName: \"kubernetes.io/projected/71bdff05-728b-4401-be44-30fa83148d22-kube-api-access-dlsfp\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782362 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782379 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 
00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782426 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782458 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782500 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782519 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.782540 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-inventory\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.783749 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.784178 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.786857 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.787620 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.787831 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.787931 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.789744 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ceph\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.790594 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-inventory\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.792038 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.794956 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.803729 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlsfp\" (UniqueName: \"kubernetes.io/projected/71bdff05-728b-4401-be44-30fa83148d22-kube-api-access-dlsfp\") pod \"nova-cell1-openstack-openstack-cell1-fnhsm\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:46 crc kubenswrapper[4922]: I0930 00:43:46.972134 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:43:47 crc kubenswrapper[4922]: I0930 00:43:47.656596 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-fnhsm"] Sep 30 00:43:47 crc kubenswrapper[4922]: W0930 00:43:47.672098 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71bdff05_728b_4401_be44_30fa83148d22.slice/crio-e6d8624e5f18b6ce3dc96c587a23685adfdd9e8379dce4152f545c7473f1f03d WatchSource:0}: Error finding container e6d8624e5f18b6ce3dc96c587a23685adfdd9e8379dce4152f545c7473f1f03d: Status 404 returned error can't find the container with id e6d8624e5f18b6ce3dc96c587a23685adfdd9e8379dce4152f545c7473f1f03d Sep 30 00:43:48 crc kubenswrapper[4922]: I0930 00:43:48.545334 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" event={"ID":"71bdff05-728b-4401-be44-30fa83148d22","Type":"ContainerStarted","Data":"b1bca24a6d288b6dae16c0ee2456cf158480438887900d846b6667fa37ee18b8"} Sep 30 00:43:48 crc kubenswrapper[4922]: I0930 00:43:48.545725 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" event={"ID":"71bdff05-728b-4401-be44-30fa83148d22","Type":"ContainerStarted","Data":"e6d8624e5f18b6ce3dc96c587a23685adfdd9e8379dce4152f545c7473f1f03d"} Sep 30 00:43:48 crc kubenswrapper[4922]: I0930 00:43:48.574172 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" podStartSLOduration=2.385143504 podStartE2EDuration="2.574146795s" podCreationTimestamp="2025-09-30 00:43:46 +0000 UTC" firstStartedPulling="2025-09-30 00:43:47.676336584 +0000 UTC m=+8231.986625437" lastFinishedPulling="2025-09-30 00:43:47.865339905 +0000 UTC m=+8232.175628728" observedRunningTime="2025-09-30 00:43:48.568725591 +0000 UTC m=+8232.879014454" watchObservedRunningTime="2025-09-30 00:43:48.574146795 +0000 UTC m=+8232.884435648" Sep 30 00:43:52 crc kubenswrapper[4922]: I0930 00:43:52.422156 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:43:52 crc kubenswrapper[4922]: E0930 00:43:52.423120 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:44:07 crc kubenswrapper[4922]: I0930 00:44:07.422441 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:44:07 crc kubenswrapper[4922]: E0930 00:44:07.423277 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:44:20 crc kubenswrapper[4922]: I0930 00:44:20.423101 4922 scope.go:117] "RemoveContainer" 
containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:44:20 crc kubenswrapper[4922]: E0930 00:44:20.424242 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:44:35 crc kubenswrapper[4922]: I0930 00:44:35.421988 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:44:35 crc kubenswrapper[4922]: E0930 00:44:35.423240 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:44:49 crc kubenswrapper[4922]: I0930 00:44:49.422666 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:44:49 crc kubenswrapper[4922]: E0930 00:44:49.423766 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.161322 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr"] Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.164094 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.167466 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.172824 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.177459 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr"] Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.180053 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/646a265f-d156-4688-8a74-7dd5814481f0-config-volume\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.180408 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbtcd\" (UniqueName: \"kubernetes.io/projected/646a265f-d156-4688-8a74-7dd5814481f0-kube-api-access-bbtcd\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.180488 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/646a265f-d156-4688-8a74-7dd5814481f0-secret-volume\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.281344 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbtcd\" (UniqueName: \"kubernetes.io/projected/646a265f-d156-4688-8a74-7dd5814481f0-kube-api-access-bbtcd\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.281451 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/646a265f-d156-4688-8a74-7dd5814481f0-secret-volume\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.281536 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/646a265f-d156-4688-8a74-7dd5814481f0-config-volume\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.282625 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/646a265f-d156-4688-8a74-7dd5814481f0-config-volume\") pod 
\"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.300187 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/646a265f-d156-4688-8a74-7dd5814481f0-secret-volume\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.302033 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbtcd\" (UniqueName: \"kubernetes.io/projected/646a265f-d156-4688-8a74-7dd5814481f0-kube-api-access-bbtcd\") pod \"collect-profiles-29319885-w8clr\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:00 crc kubenswrapper[4922]: I0930 00:45:00.496548 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:01 crc kubenswrapper[4922]: I0930 00:45:01.022775 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr"] Sep 30 00:45:01 crc kubenswrapper[4922]: I0930 00:45:01.455735 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" event={"ID":"646a265f-d156-4688-8a74-7dd5814481f0","Type":"ContainerStarted","Data":"57611177830ab225285204c375f49787c0533e84be1ef322116f21ed0888481a"} Sep 30 00:45:01 crc kubenswrapper[4922]: I0930 00:45:01.456069 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" event={"ID":"646a265f-d156-4688-8a74-7dd5814481f0","Type":"ContainerStarted","Data":"d63012d01a3f5bf2b5e77107eb85308234ef27bc93fb3b2fe1f4d76810bc8b76"} Sep 30 00:45:01 crc kubenswrapper[4922]: I0930 00:45:01.481365 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" podStartSLOduration=1.481335868 podStartE2EDuration="1.481335868s" podCreationTimestamp="2025-09-30 00:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 00:45:01.469717091 +0000 UTC m=+8305.780005914" watchObservedRunningTime="2025-09-30 00:45:01.481335868 +0000 UTC m=+8305.791624701" Sep 30 00:45:02 crc kubenswrapper[4922]: I0930 00:45:02.423411 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:45:02 crc kubenswrapper[4922]: E0930 00:45:02.424045 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:45:02 crc kubenswrapper[4922]: I0930 00:45:02.470195 4922 generic.go:334] "Generic (PLEG): container finished" podID="646a265f-d156-4688-8a74-7dd5814481f0" 
containerID="57611177830ab225285204c375f49787c0533e84be1ef322116f21ed0888481a" exitCode=0 Sep 30 00:45:02 crc kubenswrapper[4922]: I0930 00:45:02.470259 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" event={"ID":"646a265f-d156-4688-8a74-7dd5814481f0","Type":"ContainerDied","Data":"57611177830ab225285204c375f49787c0533e84be1ef322116f21ed0888481a"} Sep 30 00:45:03 crc kubenswrapper[4922]: I0930 00:45:03.941825 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:03 crc kubenswrapper[4922]: I0930 00:45:03.969374 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbtcd\" (UniqueName: \"kubernetes.io/projected/646a265f-d156-4688-8a74-7dd5814481f0-kube-api-access-bbtcd\") pod \"646a265f-d156-4688-8a74-7dd5814481f0\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " Sep 30 00:45:03 crc kubenswrapper[4922]: I0930 00:45:03.969588 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/646a265f-d156-4688-8a74-7dd5814481f0-config-volume\") pod \"646a265f-d156-4688-8a74-7dd5814481f0\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " Sep 30 00:45:03 crc kubenswrapper[4922]: I0930 00:45:03.969677 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/646a265f-d156-4688-8a74-7dd5814481f0-secret-volume\") pod \"646a265f-d156-4688-8a74-7dd5814481f0\" (UID: \"646a265f-d156-4688-8a74-7dd5814481f0\") " Sep 30 00:45:03 crc kubenswrapper[4922]: I0930 00:45:03.973701 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646a265f-d156-4688-8a74-7dd5814481f0-config-volume" (OuterVolumeSpecName: "config-volume") pod "646a265f-d156-4688-8a74-7dd5814481f0" (UID: "646a265f-d156-4688-8a74-7dd5814481f0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:45:03 crc kubenswrapper[4922]: I0930 00:45:03.975609 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/646a265f-d156-4688-8a74-7dd5814481f0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "646a265f-d156-4688-8a74-7dd5814481f0" (UID: "646a265f-d156-4688-8a74-7dd5814481f0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:45:03 crc kubenswrapper[4922]: I0930 00:45:03.976062 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/646a265f-d156-4688-8a74-7dd5814481f0-kube-api-access-bbtcd" (OuterVolumeSpecName: "kube-api-access-bbtcd") pod "646a265f-d156-4688-8a74-7dd5814481f0" (UID: "646a265f-d156-4688-8a74-7dd5814481f0"). InnerVolumeSpecName "kube-api-access-bbtcd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.071277 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/646a265f-d156-4688-8a74-7dd5814481f0-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.071319 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/646a265f-d156-4688-8a74-7dd5814481f0-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.071337 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbtcd\" (UniqueName: \"kubernetes.io/projected/646a265f-d156-4688-8a74-7dd5814481f0-kube-api-access-bbtcd\") on node \"crc\" DevicePath \"\"" Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.496436 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" event={"ID":"646a265f-d156-4688-8a74-7dd5814481f0","Type":"ContainerDied","Data":"d63012d01a3f5bf2b5e77107eb85308234ef27bc93fb3b2fe1f4d76810bc8b76"} Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.496749 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d63012d01a3f5bf2b5e77107eb85308234ef27bc93fb3b2fe1f4d76810bc8b76" Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.496563 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319885-w8clr" Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.564120 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j"] Sep 30 00:45:04 crc kubenswrapper[4922]: I0930 00:45:04.575366 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319840-hkj8j"] Sep 30 00:45:06 crc kubenswrapper[4922]: I0930 00:45:06.439270 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90d590f0-fe0e-4c8f-9da4-b842519100d9" path="/var/lib/kubelet/pods/90d590f0-fe0e-4c8f-9da4-b842519100d9/volumes" Sep 30 00:45:17 crc kubenswrapper[4922]: I0930 00:45:17.421828 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:45:17 crc kubenswrapper[4922]: E0930 00:45:17.422561 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:45:31 crc kubenswrapper[4922]: I0930 00:45:31.426715 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:45:31 crc kubenswrapper[4922]: E0930 00:45:31.427702 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:45:42 crc kubenswrapper[4922]: I0930 00:45:42.421494 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:45:42 crc kubenswrapper[4922]: E0930 00:45:42.422243 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:45:56 crc kubenswrapper[4922]: I0930 00:45:56.441489 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:45:56 crc kubenswrapper[4922]: E0930 00:45:56.444374 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:45:59 crc kubenswrapper[4922]: I0930 00:45:59.086440 4922 scope.go:117] "RemoveContainer" containerID="bbb4c59fa7b2a46b5117f5be6b2cc3728d94e1e446b3de32b927ecc1ef8104fa" Sep 30 00:46:10 crc kubenswrapper[4922]: I0930 00:46:10.422519 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:46:10 crc kubenswrapper[4922]: E0930 00:46:10.423596 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:46:23 crc kubenswrapper[4922]: I0930 00:46:23.422858 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:46:23 crc kubenswrapper[4922]: E0930 00:46:23.424069 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:46:37 crc kubenswrapper[4922]: I0930 00:46:37.422083 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:46:37 crc kubenswrapper[4922]: E0930 00:46:37.422917 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:46:52 crc kubenswrapper[4922]: I0930 00:46:52.422345 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:46:52 crc kubenswrapper[4922]: E0930 00:46:52.423508 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:47:05 crc kubenswrapper[4922]: I0930 00:47:05.422657 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:47:05 crc kubenswrapper[4922]: E0930 00:47:05.423522 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:47:19 crc kubenswrapper[4922]: I0930 00:47:19.422280 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:47:19 crc kubenswrapper[4922]: E0930 00:47:19.422982 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:47:33 crc kubenswrapper[4922]: I0930 00:47:33.422074 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:47:34 crc kubenswrapper[4922]: I0930 00:47:34.452891 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"02952da391233edeeb02bcaef879f5b7ee29d072ea22b69ac11e12fc9238faa3"} Sep 30 00:47:38 crc kubenswrapper[4922]: I0930 00:47:38.519282 4922 generic.go:334] "Generic (PLEG): container finished" podID="71bdff05-728b-4401-be44-30fa83148d22" containerID="b1bca24a6d288b6dae16c0ee2456cf158480438887900d846b6667fa37ee18b8" exitCode=0 Sep 30 00:47:38 crc kubenswrapper[4922]: I0930 00:47:38.519345 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" event={"ID":"71bdff05-728b-4401-be44-30fa83148d22","Type":"ContainerDied","Data":"b1bca24a6d288b6dae16c0ee2456cf158480438887900d846b6667fa37ee18b8"} Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.096457 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.133824 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-1\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.133868 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlsfp\" (UniqueName: \"kubernetes.io/projected/71bdff05-728b-4401-be44-30fa83148d22-kube-api-access-dlsfp\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.133932 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-combined-ca-bundle\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134073 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-1\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134123 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-inventory\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134156 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-0\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134180 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-1\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134214 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-0\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134235 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ssh-key\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134433 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-0\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.134460 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ceph\") pod \"71bdff05-728b-4401-be44-30fa83148d22\" (UID: \"71bdff05-728b-4401-be44-30fa83148d22\") " Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.141371 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ceph" (OuterVolumeSpecName: "ceph") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.141736 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71bdff05-728b-4401-be44-30fa83148d22-kube-api-access-dlsfp" (OuterVolumeSpecName: "kube-api-access-dlsfp") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "kube-api-access-dlsfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.196502 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.203528 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.203578 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.208368 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-inventory" (OuterVolumeSpecName: "inventory") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.209126 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.211436 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.215000 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.217344 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.228126 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "71bdff05-728b-4401-be44-30fa83148d22" (UID: "71bdff05-728b-4401-be44-30fa83148d22"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.236994 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237043 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237061 4922 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237074 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237085 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237097 4922 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237108 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237120 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237131 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlsfp\" (UniqueName: \"kubernetes.io/projected/71bdff05-728b-4401-be44-30fa83148d22-kube-api-access-dlsfp\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237142 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71bdff05-728b-4401-be44-30fa83148d22-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.237196 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/71bdff05-728b-4401-be44-30fa83148d22-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.578385 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" event={"ID":"71bdff05-728b-4401-be44-30fa83148d22","Type":"ContainerDied","Data":"e6d8624e5f18b6ce3dc96c587a23685adfdd9e8379dce4152f545c7473f1f03d"} Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.578809 4922 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="e6d8624e5f18b6ce3dc96c587a23685adfdd9e8379dce4152f545c7473f1f03d" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.578882 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-fnhsm" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.698363 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-nl9hf"] Sep 30 00:47:40 crc kubenswrapper[4922]: E0930 00:47:40.699768 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646a265f-d156-4688-8a74-7dd5814481f0" containerName="collect-profiles" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.699787 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="646a265f-d156-4688-8a74-7dd5814481f0" containerName="collect-profiles" Sep 30 00:47:40 crc kubenswrapper[4922]: E0930 00:47:40.699814 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71bdff05-728b-4401-be44-30fa83148d22" containerName="nova-cell1-openstack-openstack-cell1" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.699821 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="71bdff05-728b-4401-be44-30fa83148d22" containerName="nova-cell1-openstack-openstack-cell1" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.700028 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="71bdff05-728b-4401-be44-30fa83148d22" containerName="nova-cell1-openstack-openstack-cell1" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.700053 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="646a265f-d156-4688-8a74-7dd5814481f0" containerName="collect-profiles" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.700766 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.707077 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.707204 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.707217 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.707660 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.708527 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.715832 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-nl9hf"] Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.747866 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.747924 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.748587 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ssh-key\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.748714 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceph\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.748811 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-inventory\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.749014 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: 
\"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.749183 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.749348 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxptf\" (UniqueName: \"kubernetes.io/projected/8e76de18-d8f3-45ae-898e-f7262477074d-kube-api-access-pxptf\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.850960 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.851035 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxptf\" (UniqueName: \"kubernetes.io/projected/8e76de18-d8f3-45ae-898e-f7262477074d-kube-api-access-pxptf\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.851129 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.851148 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.851184 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ssh-key\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.851217 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceph\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.851248 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-inventory\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.851306 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.856325 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.856557 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.856649 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ssh-key\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.857120 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceph\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.858849 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-inventory\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.859900 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " 
pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.865330 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:40 crc kubenswrapper[4922]: I0930 00:47:40.874823 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxptf\" (UniqueName: \"kubernetes.io/projected/8e76de18-d8f3-45ae-898e-f7262477074d-kube-api-access-pxptf\") pod \"telemetry-openstack-openstack-cell1-nl9hf\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:41 crc kubenswrapper[4922]: I0930 00:47:41.034185 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:47:41 crc kubenswrapper[4922]: I0930 00:47:41.591857 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:47:41 crc kubenswrapper[4922]: I0930 00:47:41.592054 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-nl9hf"] Sep 30 00:47:42 crc kubenswrapper[4922]: I0930 00:47:42.607594 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" event={"ID":"8e76de18-d8f3-45ae-898e-f7262477074d","Type":"ContainerStarted","Data":"b396df66c424b4b47e8fe559e486ed93099c1eb76c5f86bc31235ee67edc5290"} Sep 30 00:47:42 crc kubenswrapper[4922]: I0930 00:47:42.608202 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" event={"ID":"8e76de18-d8f3-45ae-898e-f7262477074d","Type":"ContainerStarted","Data":"c9af1b2a3fede2b0823ccbdc2750d02125d8a2895ecbdf5e99e06d4a30017869"} Sep 30 00:47:42 crc kubenswrapper[4922]: I0930 00:47:42.639101 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" podStartSLOduration=2.449596836 podStartE2EDuration="2.63907488s" podCreationTimestamp="2025-09-30 00:47:40 +0000 UTC" firstStartedPulling="2025-09-30 00:47:41.591683701 +0000 UTC m=+8465.901972514" lastFinishedPulling="2025-09-30 00:47:41.781161755 +0000 UTC m=+8466.091450558" observedRunningTime="2025-09-30 00:47:42.628123009 +0000 UTC m=+8466.938411842" watchObservedRunningTime="2025-09-30 00:47:42.63907488 +0000 UTC m=+8466.949363703" Sep 30 00:48:08 crc kubenswrapper[4922]: I0930 00:48:08.951804 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gjc2h"] Sep 30 00:48:08 crc kubenswrapper[4922]: I0930 00:48:08.955364 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:08 crc kubenswrapper[4922]: I0930 00:48:08.973120 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gjc2h"] Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.150203 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-utilities\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.150517 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-catalog-content\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.150691 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct4kg\" (UniqueName: \"kubernetes.io/projected/0487d273-e3c9-4d2e-b42f-9f9d174c114a-kube-api-access-ct4kg\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.253108 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-utilities\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.253178 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-catalog-content\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.253228 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct4kg\" (UniqueName: \"kubernetes.io/projected/0487d273-e3c9-4d2e-b42f-9f9d174c114a-kube-api-access-ct4kg\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.254057 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-utilities\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.254434 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-catalog-content\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.280796 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ct4kg\" (UniqueName: \"kubernetes.io/projected/0487d273-e3c9-4d2e-b42f-9f9d174c114a-kube-api-access-ct4kg\") pod \"redhat-operators-gjc2h\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:09 crc kubenswrapper[4922]: I0930 00:48:09.578997 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:10 crc kubenswrapper[4922]: W0930 00:48:10.106746 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0487d273_e3c9_4d2e_b42f_9f9d174c114a.slice/crio-4e954b5fafd9201dc98581f2c23dbdcba22d905f837dd2c53a1f2732be73a612 WatchSource:0}: Error finding container 4e954b5fafd9201dc98581f2c23dbdcba22d905f837dd2c53a1f2732be73a612: Status 404 returned error can't find the container with id 4e954b5fafd9201dc98581f2c23dbdcba22d905f837dd2c53a1f2732be73a612 Sep 30 00:48:10 crc kubenswrapper[4922]: I0930 00:48:10.109169 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gjc2h"] Sep 30 00:48:10 crc kubenswrapper[4922]: I0930 00:48:10.961042 4922 generic.go:334] "Generic (PLEG): container finished" podID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerID="98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697" exitCode=0 Sep 30 00:48:10 crc kubenswrapper[4922]: I0930 00:48:10.961178 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjc2h" event={"ID":"0487d273-e3c9-4d2e-b42f-9f9d174c114a","Type":"ContainerDied","Data":"98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697"} Sep 30 00:48:10 crc kubenswrapper[4922]: I0930 00:48:10.961311 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjc2h" event={"ID":"0487d273-e3c9-4d2e-b42f-9f9d174c114a","Type":"ContainerStarted","Data":"4e954b5fafd9201dc98581f2c23dbdcba22d905f837dd2c53a1f2732be73a612"} Sep 30 00:48:12 crc kubenswrapper[4922]: I0930 00:48:12.986951 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjc2h" event={"ID":"0487d273-e3c9-4d2e-b42f-9f9d174c114a","Type":"ContainerStarted","Data":"ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4"} Sep 30 00:48:17 crc kubenswrapper[4922]: I0930 00:48:17.031541 4922 generic.go:334] "Generic (PLEG): container finished" podID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerID="ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4" exitCode=0 Sep 30 00:48:17 crc kubenswrapper[4922]: I0930 00:48:17.031632 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjc2h" event={"ID":"0487d273-e3c9-4d2e-b42f-9f9d174c114a","Type":"ContainerDied","Data":"ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4"} Sep 30 00:48:18 crc kubenswrapper[4922]: I0930 00:48:18.097474 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjc2h" event={"ID":"0487d273-e3c9-4d2e-b42f-9f9d174c114a","Type":"ContainerStarted","Data":"babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9"} Sep 30 00:48:18 crc kubenswrapper[4922]: I0930 00:48:18.114716 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gjc2h" podStartSLOduration=3.630883476 podStartE2EDuration="10.114701563s" 
podCreationTimestamp="2025-09-30 00:48:08 +0000 UTC" firstStartedPulling="2025-09-30 00:48:10.963554672 +0000 UTC m=+8495.273843485" lastFinishedPulling="2025-09-30 00:48:17.447372739 +0000 UTC m=+8501.757661572" observedRunningTime="2025-09-30 00:48:18.114235922 +0000 UTC m=+8502.424524735" watchObservedRunningTime="2025-09-30 00:48:18.114701563 +0000 UTC m=+8502.424990376" Sep 30 00:48:19 crc kubenswrapper[4922]: I0930 00:48:19.579951 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:19 crc kubenswrapper[4922]: I0930 00:48:19.580271 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:20 crc kubenswrapper[4922]: I0930 00:48:20.657039 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gjc2h" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="registry-server" probeResult="failure" output=< Sep 30 00:48:20 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:48:20 crc kubenswrapper[4922]: > Sep 30 00:48:30 crc kubenswrapper[4922]: I0930 00:48:30.644094 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gjc2h" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="registry-server" probeResult="failure" output=< Sep 30 00:48:30 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:48:30 crc kubenswrapper[4922]: > Sep 30 00:48:39 crc kubenswrapper[4922]: I0930 00:48:39.664458 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:39 crc kubenswrapper[4922]: I0930 00:48:39.726702 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:40 crc kubenswrapper[4922]: I0930 00:48:40.160952 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gjc2h"] Sep 30 00:48:41 crc kubenswrapper[4922]: I0930 00:48:41.362726 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gjc2h" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="registry-server" containerID="cri-o://babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9" gracePeriod=2 Sep 30 00:48:41 crc kubenswrapper[4922]: I0930 00:48:41.898573 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:41 crc kubenswrapper[4922]: I0930 00:48:41.958850 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct4kg\" (UniqueName: \"kubernetes.io/projected/0487d273-e3c9-4d2e-b42f-9f9d174c114a-kube-api-access-ct4kg\") pod \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " Sep 30 00:48:41 crc kubenswrapper[4922]: I0930 00:48:41.959019 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-utilities\") pod \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " Sep 30 00:48:41 crc kubenswrapper[4922]: I0930 00:48:41.959067 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-catalog-content\") pod \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\" (UID: \"0487d273-e3c9-4d2e-b42f-9f9d174c114a\") " Sep 30 00:48:41 crc kubenswrapper[4922]: I0930 00:48:41.960974 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-utilities" (OuterVolumeSpecName: "utilities") pod "0487d273-e3c9-4d2e-b42f-9f9d174c114a" (UID: "0487d273-e3c9-4d2e-b42f-9f9d174c114a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:48:41 crc kubenswrapper[4922]: I0930 00:48:41.970287 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0487d273-e3c9-4d2e-b42f-9f9d174c114a-kube-api-access-ct4kg" (OuterVolumeSpecName: "kube-api-access-ct4kg") pod "0487d273-e3c9-4d2e-b42f-9f9d174c114a" (UID: "0487d273-e3c9-4d2e-b42f-9f9d174c114a"). InnerVolumeSpecName "kube-api-access-ct4kg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.039766 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0487d273-e3c9-4d2e-b42f-9f9d174c114a" (UID: "0487d273-e3c9-4d2e-b42f-9f9d174c114a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.061925 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct4kg\" (UniqueName: \"kubernetes.io/projected/0487d273-e3c9-4d2e-b42f-9f9d174c114a-kube-api-access-ct4kg\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.061972 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.061986 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0487d273-e3c9-4d2e-b42f-9f9d174c114a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.379781 4922 generic.go:334] "Generic (PLEG): container finished" podID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerID="babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9" exitCode=0 Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.379829 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjc2h" event={"ID":"0487d273-e3c9-4d2e-b42f-9f9d174c114a","Type":"ContainerDied","Data":"babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9"} Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.379859 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gjc2h" event={"ID":"0487d273-e3c9-4d2e-b42f-9f9d174c114a","Type":"ContainerDied","Data":"4e954b5fafd9201dc98581f2c23dbdcba22d905f837dd2c53a1f2732be73a612"} Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.379889 4922 scope.go:117] "RemoveContainer" containerID="babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.379952 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gjc2h" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.417611 4922 scope.go:117] "RemoveContainer" containerID="ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.454429 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gjc2h"] Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.460887 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gjc2h"] Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.471459 4922 scope.go:117] "RemoveContainer" containerID="98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.497156 4922 scope.go:117] "RemoveContainer" containerID="babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9" Sep 30 00:48:42 crc kubenswrapper[4922]: E0930 00:48:42.497745 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9\": container with ID starting with babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9 not found: ID does not exist" containerID="babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.497798 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9"} err="failed to get container status \"babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9\": rpc error: code = NotFound desc = could not find container \"babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9\": container with ID starting with babe04b5c4da3d1bb2313f0a46ef31390813b535e34c426e7af835f8d19da3f9 not found: ID does not exist" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.497831 4922 scope.go:117] "RemoveContainer" containerID="ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4" Sep 30 00:48:42 crc kubenswrapper[4922]: E0930 00:48:42.498172 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4\": container with ID starting with ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4 not found: ID does not exist" containerID="ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.498202 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4"} err="failed to get container status \"ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4\": rpc error: code = NotFound desc = could not find container \"ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4\": container with ID starting with ee0e3d681c6ba2ec53978da7165bec39544f852b2f6523afcbfaebe9f3cf73c4 not found: ID does not exist" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.498216 4922 scope.go:117] "RemoveContainer" containerID="98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697" Sep 30 00:48:42 crc kubenswrapper[4922]: E0930 00:48:42.498588 4922 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697\": container with ID starting with 98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697 not found: ID does not exist" containerID="98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697" Sep 30 00:48:42 crc kubenswrapper[4922]: I0930 00:48:42.498632 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697"} err="failed to get container status \"98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697\": rpc error: code = NotFound desc = could not find container \"98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697\": container with ID starting with 98794a782f3384eecc916cf839ab693c48f8269b59b69b9425e53ed2f3952697 not found: ID does not exist" Sep 30 00:48:44 crc kubenswrapper[4922]: I0930 00:48:44.443509 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" path="/var/lib/kubelet/pods/0487d273-e3c9-4d2e-b42f-9f9d174c114a/volumes" Sep 30 00:49:58 crc kubenswrapper[4922]: I0930 00:49:58.913067 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:49:58 crc kubenswrapper[4922]: I0930 00:49:58.913735 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:50:28 crc kubenswrapper[4922]: I0930 00:50:28.912322 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:50:28 crc kubenswrapper[4922]: I0930 00:50:28.912985 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:50:58 crc kubenswrapper[4922]: I0930 00:50:58.912385 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:50:58 crc kubenswrapper[4922]: I0930 00:50:58.913311 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:50:58 crc kubenswrapper[4922]: I0930 00:50:58.913386 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:50:58 crc kubenswrapper[4922]: I0930 00:50:58.914927 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"02952da391233edeeb02bcaef879f5b7ee29d072ea22b69ac11e12fc9238faa3"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:50:58 crc kubenswrapper[4922]: I0930 00:50:58.914998 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://02952da391233edeeb02bcaef879f5b7ee29d072ea22b69ac11e12fc9238faa3" gracePeriod=600 Sep 30 00:50:59 crc kubenswrapper[4922]: I0930 00:50:59.096239 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="02952da391233edeeb02bcaef879f5b7ee29d072ea22b69ac11e12fc9238faa3" exitCode=0 Sep 30 00:50:59 crc kubenswrapper[4922]: I0930 00:50:59.096276 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"02952da391233edeeb02bcaef879f5b7ee29d072ea22b69ac11e12fc9238faa3"} Sep 30 00:50:59 crc kubenswrapper[4922]: I0930 00:50:59.096305 4922 scope.go:117] "RemoveContainer" containerID="f087c486e91fe49538ba9514eb09dc1e296f155731bca10d6635a62a04a8b2f1" Sep 30 00:51:00 crc kubenswrapper[4922]: I0930 00:51:00.109060 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300"} Sep 30 00:52:21 crc kubenswrapper[4922]: I0930 00:52:21.177249 4922 generic.go:334] "Generic (PLEG): container finished" podID="8e76de18-d8f3-45ae-898e-f7262477074d" containerID="b396df66c424b4b47e8fe559e486ed93099c1eb76c5f86bc31235ee67edc5290" exitCode=0 Sep 30 00:52:21 crc kubenswrapper[4922]: I0930 00:52:21.177321 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" event={"ID":"8e76de18-d8f3-45ae-898e-f7262477074d","Type":"ContainerDied","Data":"b396df66c424b4b47e8fe559e486ed93099c1eb76c5f86bc31235ee67edc5290"} Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.680513 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.850059 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceph\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.850124 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ssh-key\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.850265 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-1\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.850297 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-telemetry-combined-ca-bundle\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.850948 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-inventory\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.851337 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-2\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.851380 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-0\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.851435 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxptf\" (UniqueName: \"kubernetes.io/projected/8e76de18-d8f3-45ae-898e-f7262477074d-kube-api-access-pxptf\") pod \"8e76de18-d8f3-45ae-898e-f7262477074d\" (UID: \"8e76de18-d8f3-45ae-898e-f7262477074d\") " Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.857863 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e76de18-d8f3-45ae-898e-f7262477074d-kube-api-access-pxptf" (OuterVolumeSpecName: "kube-api-access-pxptf") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "kube-api-access-pxptf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.858196 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.863900 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceph" (OuterVolumeSpecName: "ceph") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.892124 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-inventory" (OuterVolumeSpecName: "inventory") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.903791 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.905681 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.909936 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.932454 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "8e76de18-d8f3-45ae-898e-f7262477074d" (UID: "8e76de18-d8f3-45ae-898e-f7262477074d"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.955417 4922 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.955834 4922 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.956144 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.956272 4922 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.956417 4922 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.956559 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxptf\" (UniqueName: \"kubernetes.io/projected/8e76de18-d8f3-45ae-898e-f7262477074d-kube-api-access-pxptf\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.956677 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:22 crc kubenswrapper[4922]: I0930 00:52:22.956800 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e76de18-d8f3-45ae-898e-f7262477074d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.209744 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" event={"ID":"8e76de18-d8f3-45ae-898e-f7262477074d","Type":"ContainerDied","Data":"c9af1b2a3fede2b0823ccbdc2750d02125d8a2895ecbdf5e99e06d4a30017869"} Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.209813 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9af1b2a3fede2b0823ccbdc2750d02125d8a2895ecbdf5e99e06d4a30017869" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.209836 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-nl9hf" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.360899 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-8vwvj"] Sep 30 00:52:23 crc kubenswrapper[4922]: E0930 00:52:23.361630 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="extract-content" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.361661 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="extract-content" Sep 30 00:52:23 crc kubenswrapper[4922]: E0930 00:52:23.361729 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e76de18-d8f3-45ae-898e-f7262477074d" containerName="telemetry-openstack-openstack-cell1" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.361750 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e76de18-d8f3-45ae-898e-f7262477074d" containerName="telemetry-openstack-openstack-cell1" Sep 30 00:52:23 crc kubenswrapper[4922]: E0930 00:52:23.361795 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="registry-server" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.361811 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="registry-server" Sep 30 00:52:23 crc kubenswrapper[4922]: E0930 00:52:23.361878 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="extract-utilities" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.361895 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="extract-utilities" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.362192 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e76de18-d8f3-45ae-898e-f7262477074d" containerName="telemetry-openstack-openstack-cell1" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.362258 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0487d273-e3c9-4d2e-b42f-9f9d174c114a" containerName="registry-server" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.363275 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.368042 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.368431 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.368471 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.368695 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.368998 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.377690 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-8vwvj"] Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.467410 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.467639 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.467749 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.467842 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nh6s\" (UniqueName: \"kubernetes.io/projected/8b134d4f-b9e7-4a35-9214-ef18899dba9a-kube-api-access-5nh6s\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.467874 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.467938 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.569607 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.569863 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.569918 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.569964 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nh6s\" (UniqueName: \"kubernetes.io/projected/8b134d4f-b9e7-4a35-9214-ef18899dba9a-kube-api-access-5nh6s\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.569991 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.570032 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.578905 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.579090 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ssh-key\") pod 
\"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.592631 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nh6s\" (UniqueName: \"kubernetes.io/projected/8b134d4f-b9e7-4a35-9214-ef18899dba9a-kube-api-access-5nh6s\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.598593 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.599366 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.601085 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-8vwvj\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:23 crc kubenswrapper[4922]: I0930 00:52:23.762563 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:52:24 crc kubenswrapper[4922]: I0930 00:52:24.332622 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-8vwvj"] Sep 30 00:52:24 crc kubenswrapper[4922]: W0930 00:52:24.342330 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b134d4f_b9e7_4a35_9214_ef18899dba9a.slice/crio-9b57ca7afe618464b1561c88495f78d34ee49b4a0f462a5cf95164be8693a668 WatchSource:0}: Error finding container 9b57ca7afe618464b1561c88495f78d34ee49b4a0f462a5cf95164be8693a668: Status 404 returned error can't find the container with id 9b57ca7afe618464b1561c88495f78d34ee49b4a0f462a5cf95164be8693a668 Sep 30 00:52:25 crc kubenswrapper[4922]: I0930 00:52:25.253745 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" event={"ID":"8b134d4f-b9e7-4a35-9214-ef18899dba9a","Type":"ContainerStarted","Data":"48393a29a705bb0c6f32df03c3dc70c12b11516d00e57992b0877637e605ad0b"} Sep 30 00:52:25 crc kubenswrapper[4922]: I0930 00:52:25.254180 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" event={"ID":"8b134d4f-b9e7-4a35-9214-ef18899dba9a","Type":"ContainerStarted","Data":"9b57ca7afe618464b1561c88495f78d34ee49b4a0f462a5cf95164be8693a668"} Sep 30 00:52:25 crc kubenswrapper[4922]: I0930 00:52:25.280321 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" podStartSLOduration=2.069708425 podStartE2EDuration="2.280298851s" podCreationTimestamp="2025-09-30 00:52:23 +0000 UTC" firstStartedPulling="2025-09-30 00:52:24.349766011 +0000 UTC m=+8748.660054844" lastFinishedPulling="2025-09-30 00:52:24.560356457 +0000 UTC m=+8748.870645270" observedRunningTime="2025-09-30 00:52:25.276533688 +0000 UTC m=+8749.586822521" watchObservedRunningTime="2025-09-30 00:52:25.280298851 +0000 UTC m=+8749.590587664" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.124280 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-74hqb"] Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.127652 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.163445 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-74hqb"] Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.247488 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sglrl\" (UniqueName: \"kubernetes.io/projected/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-kube-api-access-sglrl\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.247731 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-utilities\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.247985 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-catalog-content\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.350665 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-catalog-content\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.351189 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-catalog-content\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.351459 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sglrl\" (UniqueName: \"kubernetes.io/projected/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-kube-api-access-sglrl\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.351545 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-utilities\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.351852 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-utilities\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.375778 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sglrl\" (UniqueName: \"kubernetes.io/projected/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-kube-api-access-sglrl\") pod \"community-operators-74hqb\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.470167 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:06 crc kubenswrapper[4922]: I0930 00:53:06.974073 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-74hqb"] Sep 30 00:53:07 crc kubenswrapper[4922]: I0930 00:53:07.808757 4922 generic.go:334] "Generic (PLEG): container finished" podID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerID="69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6" exitCode=0 Sep 30 00:53:07 crc kubenswrapper[4922]: I0930 00:53:07.808822 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74hqb" event={"ID":"73c3bada-4ce7-464e-80ad-eb7bf6ec56be","Type":"ContainerDied","Data":"69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6"} Sep 30 00:53:07 crc kubenswrapper[4922]: I0930 00:53:07.810549 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74hqb" event={"ID":"73c3bada-4ce7-464e-80ad-eb7bf6ec56be","Type":"ContainerStarted","Data":"d9d94ba01ba180f47b4b00a806c4826f67366b61d130b6a362b3349c98e4f48c"} Sep 30 00:53:07 crc kubenswrapper[4922]: I0930 00:53:07.811843 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:53:08 crc kubenswrapper[4922]: I0930 00:53:08.827756 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74hqb" event={"ID":"73c3bada-4ce7-464e-80ad-eb7bf6ec56be","Type":"ContainerStarted","Data":"21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f"} Sep 30 00:53:10 crc kubenswrapper[4922]: I0930 00:53:10.853491 4922 generic.go:334] "Generic (PLEG): container finished" podID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerID="21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f" exitCode=0 Sep 30 00:53:10 crc kubenswrapper[4922]: I0930 00:53:10.853575 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74hqb" event={"ID":"73c3bada-4ce7-464e-80ad-eb7bf6ec56be","Type":"ContainerDied","Data":"21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f"} Sep 30 00:53:11 crc kubenswrapper[4922]: I0930 00:53:11.869263 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74hqb" event={"ID":"73c3bada-4ce7-464e-80ad-eb7bf6ec56be","Type":"ContainerStarted","Data":"3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c"} Sep 30 00:53:11 crc kubenswrapper[4922]: I0930 00:53:11.911416 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-74hqb" podStartSLOduration=2.286042215 podStartE2EDuration="5.911376s" podCreationTimestamp="2025-09-30 00:53:06 +0000 UTC" firstStartedPulling="2025-09-30 00:53:07.811474895 +0000 UTC m=+8792.121763738" lastFinishedPulling="2025-09-30 00:53:11.43680869 +0000 UTC m=+8795.747097523" observedRunningTime="2025-09-30 00:53:11.895170539 +0000 UTC m=+8796.205459372" watchObservedRunningTime="2025-09-30 
00:53:11.911376 +0000 UTC m=+8796.221664823" Sep 30 00:53:16 crc kubenswrapper[4922]: I0930 00:53:16.471135 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:16 crc kubenswrapper[4922]: I0930 00:53:16.471769 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:17 crc kubenswrapper[4922]: I0930 00:53:17.521210 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-74hqb" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="registry-server" probeResult="failure" output=< Sep 30 00:53:17 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:53:17 crc kubenswrapper[4922]: > Sep 30 00:53:26 crc kubenswrapper[4922]: I0930 00:53:26.546023 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:26 crc kubenswrapper[4922]: I0930 00:53:26.636137 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:26 crc kubenswrapper[4922]: I0930 00:53:26.806643 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-74hqb"] Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.058239 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-74hqb" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="registry-server" containerID="cri-o://3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c" gracePeriod=2 Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.614473 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.688007 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-utilities\") pod \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.688606 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sglrl\" (UniqueName: \"kubernetes.io/projected/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-kube-api-access-sglrl\") pod \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.688788 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-catalog-content\") pod \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\" (UID: \"73c3bada-4ce7-464e-80ad-eb7bf6ec56be\") " Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.688942 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-utilities" (OuterVolumeSpecName: "utilities") pod "73c3bada-4ce7-464e-80ad-eb7bf6ec56be" (UID: "73c3bada-4ce7-464e-80ad-eb7bf6ec56be"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.689811 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.694694 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-kube-api-access-sglrl" (OuterVolumeSpecName: "kube-api-access-sglrl") pod "73c3bada-4ce7-464e-80ad-eb7bf6ec56be" (UID: "73c3bada-4ce7-464e-80ad-eb7bf6ec56be"). InnerVolumeSpecName "kube-api-access-sglrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.755748 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "73c3bada-4ce7-464e-80ad-eb7bf6ec56be" (UID: "73c3bada-4ce7-464e-80ad-eb7bf6ec56be"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.792106 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.792154 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sglrl\" (UniqueName: \"kubernetes.io/projected/73c3bada-4ce7-464e-80ad-eb7bf6ec56be-kube-api-access-sglrl\") on node \"crc\" DevicePath \"\"" Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.913185 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:53:28 crc kubenswrapper[4922]: I0930 00:53:28.913241 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.076879 4922 generic.go:334] "Generic (PLEG): container finished" podID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerID="3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c" exitCode=0 Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.076951 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74hqb" event={"ID":"73c3bada-4ce7-464e-80ad-eb7bf6ec56be","Type":"ContainerDied","Data":"3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c"} Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.077048 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74hqb" event={"ID":"73c3bada-4ce7-464e-80ad-eb7bf6ec56be","Type":"ContainerDied","Data":"d9d94ba01ba180f47b4b00a806c4826f67366b61d130b6a362b3349c98e4f48c"} Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.077084 4922 scope.go:117] "RemoveContainer" 
containerID="3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.076973 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74hqb" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.126271 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-74hqb"] Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.130378 4922 scope.go:117] "RemoveContainer" containerID="21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.139382 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-74hqb"] Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.174169 4922 scope.go:117] "RemoveContainer" containerID="69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.235885 4922 scope.go:117] "RemoveContainer" containerID="3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c" Sep 30 00:53:29 crc kubenswrapper[4922]: E0930 00:53:29.236466 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c\": container with ID starting with 3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c not found: ID does not exist" containerID="3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.236626 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c"} err="failed to get container status \"3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c\": rpc error: code = NotFound desc = could not find container \"3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c\": container with ID starting with 3428ab5d7468ad1a2bf7ddc8bf2d9fb6ffbb39d49436b91982527c032100958c not found: ID does not exist" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.236739 4922 scope.go:117] "RemoveContainer" containerID="21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f" Sep 30 00:53:29 crc kubenswrapper[4922]: E0930 00:53:29.237156 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f\": container with ID starting with 21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f not found: ID does not exist" containerID="21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.237334 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f"} err="failed to get container status \"21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f\": rpc error: code = NotFound desc = could not find container \"21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f\": container with ID starting with 21651d4f1d48c1410159634eceb51e21559889d9f4b2df1af801f1bac4ed3a8f not found: ID does not exist" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.237524 4922 scope.go:117] 
"RemoveContainer" containerID="69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6" Sep 30 00:53:29 crc kubenswrapper[4922]: E0930 00:53:29.237968 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6\": container with ID starting with 69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6 not found: ID does not exist" containerID="69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6" Sep 30 00:53:29 crc kubenswrapper[4922]: I0930 00:53:29.238094 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6"} err="failed to get container status \"69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6\": rpc error: code = NotFound desc = could not find container \"69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6\": container with ID starting with 69037e0ce5a403ac104e5e3ea5f7fa50070d18c397013bd593861e8622ba8da6 not found: ID does not exist" Sep 30 00:53:30 crc kubenswrapper[4922]: I0930 00:53:30.450736 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" path="/var/lib/kubelet/pods/73c3bada-4ce7-464e-80ad-eb7bf6ec56be/volumes" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.091609 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wxnzc"] Sep 30 00:53:51 crc kubenswrapper[4922]: E0930 00:53:51.093067 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="extract-utilities" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.093178 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="extract-utilities" Sep 30 00:53:51 crc kubenswrapper[4922]: E0930 00:53:51.093201 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="extract-content" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.093210 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="extract-content" Sep 30 00:53:51 crc kubenswrapper[4922]: E0930 00:53:51.093257 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="registry-server" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.093267 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="registry-server" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.093558 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="73c3bada-4ce7-464e-80ad-eb7bf6ec56be" containerName="registry-server" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.095498 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.115996 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wxnzc"] Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.264666 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-catalog-content\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.265116 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-utilities\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.265265 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mh96\" (UniqueName: \"kubernetes.io/projected/f3a6e203-9290-4f1d-bf09-1e22b6f57709-kube-api-access-5mh96\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.366910 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-catalog-content\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.367267 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-utilities\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.367376 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mh96\" (UniqueName: \"kubernetes.io/projected/f3a6e203-9290-4f1d-bf09-1e22b6f57709-kube-api-access-5mh96\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.367380 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-catalog-content\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.367734 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-utilities\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.388767 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5mh96\" (UniqueName: \"kubernetes.io/projected/f3a6e203-9290-4f1d-bf09-1e22b6f57709-kube-api-access-5mh96\") pod \"redhat-marketplace-wxnzc\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.422039 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:53:51 crc kubenswrapper[4922]: I0930 00:53:51.910502 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wxnzc"] Sep 30 00:53:52 crc kubenswrapper[4922]: I0930 00:53:52.390736 4922 generic.go:334] "Generic (PLEG): container finished" podID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerID="c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233" exitCode=0 Sep 30 00:53:52 crc kubenswrapper[4922]: I0930 00:53:52.391019 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wxnzc" event={"ID":"f3a6e203-9290-4f1d-bf09-1e22b6f57709","Type":"ContainerDied","Data":"c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233"} Sep 30 00:53:52 crc kubenswrapper[4922]: I0930 00:53:52.391381 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wxnzc" event={"ID":"f3a6e203-9290-4f1d-bf09-1e22b6f57709","Type":"ContainerStarted","Data":"5297853db9992f40ae35c710405cb11de7460eeb12d935550354619949f49518"} Sep 30 00:53:53 crc kubenswrapper[4922]: I0930 00:53:53.403848 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wxnzc" event={"ID":"f3a6e203-9290-4f1d-bf09-1e22b6f57709","Type":"ContainerStarted","Data":"eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca"} Sep 30 00:53:54 crc kubenswrapper[4922]: I0930 00:53:54.415534 4922 generic.go:334] "Generic (PLEG): container finished" podID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerID="eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca" exitCode=0 Sep 30 00:53:54 crc kubenswrapper[4922]: I0930 00:53:54.415629 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wxnzc" event={"ID":"f3a6e203-9290-4f1d-bf09-1e22b6f57709","Type":"ContainerDied","Data":"eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca"} Sep 30 00:53:55 crc kubenswrapper[4922]: I0930 00:53:55.427872 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wxnzc" event={"ID":"f3a6e203-9290-4f1d-bf09-1e22b6f57709","Type":"ContainerStarted","Data":"090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3"} Sep 30 00:53:55 crc kubenswrapper[4922]: I0930 00:53:55.462190 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wxnzc" podStartSLOduration=1.8047146600000001 podStartE2EDuration="4.462167434s" podCreationTimestamp="2025-09-30 00:53:51 +0000 UTC" firstStartedPulling="2025-09-30 00:53:52.393659431 +0000 UTC m=+8836.703948264" lastFinishedPulling="2025-09-30 00:53:55.051112195 +0000 UTC m=+8839.361401038" observedRunningTime="2025-09-30 00:53:55.459831626 +0000 UTC m=+8839.770120469" watchObservedRunningTime="2025-09-30 00:53:55.462167434 +0000 UTC m=+8839.772456257" Sep 30 00:53:58 crc kubenswrapper[4922]: I0930 00:53:58.913110 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:53:58 crc kubenswrapper[4922]: I0930 00:53:58.914238 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:54:01 crc kubenswrapper[4922]: I0930 00:54:01.423070 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:54:01 crc kubenswrapper[4922]: I0930 00:54:01.423515 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:54:01 crc kubenswrapper[4922]: I0930 00:54:01.504848 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:54:01 crc kubenswrapper[4922]: I0930 00:54:01.599457 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:54:01 crc kubenswrapper[4922]: I0930 00:54:01.818765 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wxnzc"] Sep 30 00:54:03 crc kubenswrapper[4922]: I0930 00:54:03.533854 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wxnzc" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="registry-server" containerID="cri-o://090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3" gracePeriod=2 Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.055789 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.233252 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5mh96\" (UniqueName: \"kubernetes.io/projected/f3a6e203-9290-4f1d-bf09-1e22b6f57709-kube-api-access-5mh96\") pod \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.233754 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-utilities\") pod \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.233855 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-catalog-content\") pod \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\" (UID: \"f3a6e203-9290-4f1d-bf09-1e22b6f57709\") " Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.234662 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-utilities" (OuterVolumeSpecName: "utilities") pod "f3a6e203-9290-4f1d-bf09-1e22b6f57709" (UID: "f3a6e203-9290-4f1d-bf09-1e22b6f57709"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.242505 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3a6e203-9290-4f1d-bf09-1e22b6f57709-kube-api-access-5mh96" (OuterVolumeSpecName: "kube-api-access-5mh96") pod "f3a6e203-9290-4f1d-bf09-1e22b6f57709" (UID: "f3a6e203-9290-4f1d-bf09-1e22b6f57709"). InnerVolumeSpecName "kube-api-access-5mh96". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.248722 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3a6e203-9290-4f1d-bf09-1e22b6f57709" (UID: "f3a6e203-9290-4f1d-bf09-1e22b6f57709"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.336359 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5mh96\" (UniqueName: \"kubernetes.io/projected/f3a6e203-9290-4f1d-bf09-1e22b6f57709-kube-api-access-5mh96\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.336404 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.336418 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3a6e203-9290-4f1d-bf09-1e22b6f57709-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.558320 4922 generic.go:334] "Generic (PLEG): container finished" podID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerID="090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3" exitCode=0 Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.558416 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wxnzc" event={"ID":"f3a6e203-9290-4f1d-bf09-1e22b6f57709","Type":"ContainerDied","Data":"090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3"} Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.558451 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wxnzc" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.558505 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wxnzc" event={"ID":"f3a6e203-9290-4f1d-bf09-1e22b6f57709","Type":"ContainerDied","Data":"5297853db9992f40ae35c710405cb11de7460eeb12d935550354619949f49518"} Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.558588 4922 scope.go:117] "RemoveContainer" containerID="090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.609303 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wxnzc"] Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.610170 4922 scope.go:117] "RemoveContainer" containerID="eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.626665 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wxnzc"] Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.676017 4922 scope.go:117] "RemoveContainer" containerID="c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.720155 4922 scope.go:117] "RemoveContainer" containerID="090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3" Sep 30 00:54:04 crc kubenswrapper[4922]: E0930 00:54:04.720605 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3\": container with ID starting with 090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3 not found: ID does not exist" containerID="090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.720640 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3"} err="failed to get container status \"090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3\": rpc error: code = NotFound desc = could not find container \"090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3\": container with ID starting with 090fc48e52b746a1db5fcd4cf74ef31340cc0759317dce66466aba5e466813d3 not found: ID does not exist" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.720664 4922 scope.go:117] "RemoveContainer" containerID="eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca" Sep 30 00:54:04 crc kubenswrapper[4922]: E0930 00:54:04.721070 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca\": container with ID starting with eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca not found: ID does not exist" containerID="eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.721095 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca"} err="failed to get container status \"eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca\": rpc error: code = NotFound desc = could not find 
container \"eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca\": container with ID starting with eaddff2c06738ab4002ebf9462af960bd9c7c26f3bb47af2b42250ee0079acca not found: ID does not exist" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.721113 4922 scope.go:117] "RemoveContainer" containerID="c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233" Sep 30 00:54:04 crc kubenswrapper[4922]: E0930 00:54:04.721544 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233\": container with ID starting with c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233 not found: ID does not exist" containerID="c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233" Sep 30 00:54:04 crc kubenswrapper[4922]: I0930 00:54:04.721569 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233"} err="failed to get container status \"c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233\": rpc error: code = NotFound desc = could not find container \"c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233\": container with ID starting with c0b6ae7832ac8cd307a5992d566f5262c32ed9199ffc7d7d7780f2bf64cd8233 not found: ID does not exist" Sep 30 00:54:06 crc kubenswrapper[4922]: I0930 00:54:06.454568 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" path="/var/lib/kubelet/pods/f3a6e203-9290-4f1d-bf09-1e22b6f57709/volumes" Sep 30 00:54:28 crc kubenswrapper[4922]: I0930 00:54:28.913173 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 00:54:28 crc kubenswrapper[4922]: I0930 00:54:28.913779 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 00:54:28 crc kubenswrapper[4922]: I0930 00:54:28.913823 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 00:54:28 crc kubenswrapper[4922]: I0930 00:54:28.914730 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 00:54:28 crc kubenswrapper[4922]: I0930 00:54:28.914806 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" gracePeriod=600 Sep 30 00:54:29 crc kubenswrapper[4922]: E0930 00:54:29.039857 4922 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:54:29 crc kubenswrapper[4922]: I0930 00:54:29.883289 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" exitCode=0 Sep 30 00:54:29 crc kubenswrapper[4922]: I0930 00:54:29.883380 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300"} Sep 30 00:54:29 crc kubenswrapper[4922]: I0930 00:54:29.884132 4922 scope.go:117] "RemoveContainer" containerID="02952da391233edeeb02bcaef879f5b7ee29d072ea22b69ac11e12fc9238faa3" Sep 30 00:54:29 crc kubenswrapper[4922]: I0930 00:54:29.885488 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:54:29 crc kubenswrapper[4922]: E0930 00:54:29.886097 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:54:40 crc kubenswrapper[4922]: I0930 00:54:40.422097 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:54:40 crc kubenswrapper[4922]: E0930 00:54:40.423210 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:54:53 crc kubenswrapper[4922]: I0930 00:54:53.422038 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:54:53 crc kubenswrapper[4922]: E0930 00:54:53.423200 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:55:05 crc kubenswrapper[4922]: I0930 00:55:05.422079 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:55:05 crc kubenswrapper[4922]: E0930 00:55:05.423021 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:55:16 crc kubenswrapper[4922]: I0930 00:55:16.443291 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:55:16 crc kubenswrapper[4922]: E0930 00:55:16.446059 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:55:29 crc kubenswrapper[4922]: I0930 00:55:29.422430 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:55:29 crc kubenswrapper[4922]: E0930 00:55:29.423593 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:55:42 crc kubenswrapper[4922]: I0930 00:55:42.422989 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:55:42 crc kubenswrapper[4922]: E0930 00:55:42.424530 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.410489 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-48486"] Sep 30 00:55:51 crc kubenswrapper[4922]: E0930 00:55:51.412667 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="extract-utilities" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.412776 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="extract-utilities" Sep 30 00:55:51 crc kubenswrapper[4922]: E0930 00:55:51.412861 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="extract-content" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.412936 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="extract-content" Sep 30 00:55:51 crc kubenswrapper[4922]: E0930 00:55:51.413101 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="registry-server" Sep 30 00:55:51 crc 
kubenswrapper[4922]: I0930 00:55:51.413187 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="registry-server" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.413549 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3a6e203-9290-4f1d-bf09-1e22b6f57709" containerName="registry-server" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.441379 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.459628 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-48486"] Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.578706 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-utilities\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.578765 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85prh\" (UniqueName: \"kubernetes.io/projected/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-kube-api-access-85prh\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.578877 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-catalog-content\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.680833 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-utilities\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.680895 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85prh\" (UniqueName: \"kubernetes.io/projected/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-kube-api-access-85prh\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.680998 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-catalog-content\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.681592 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-catalog-content\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " 
pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.681691 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-utilities\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.703645 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85prh\" (UniqueName: \"kubernetes.io/projected/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-kube-api-access-85prh\") pod \"certified-operators-48486\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:51 crc kubenswrapper[4922]: I0930 00:55:51.778487 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:55:52 crc kubenswrapper[4922]: I0930 00:55:52.327131 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-48486"] Sep 30 00:55:52 crc kubenswrapper[4922]: I0930 00:55:52.973050 4922 generic.go:334] "Generic (PLEG): container finished" podID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerID="3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a" exitCode=0 Sep 30 00:55:52 crc kubenswrapper[4922]: I0930 00:55:52.973122 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-48486" event={"ID":"6fb4f746-e541-4bb0-b5d1-96774e7e89c3","Type":"ContainerDied","Data":"3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a"} Sep 30 00:55:52 crc kubenswrapper[4922]: I0930 00:55:52.973183 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-48486" event={"ID":"6fb4f746-e541-4bb0-b5d1-96774e7e89c3","Type":"ContainerStarted","Data":"1d00b118717d32b1d12308621cc79a4814587ba333d6e3172f54560389564e9a"} Sep 30 00:55:53 crc kubenswrapper[4922]: I0930 00:55:53.988148 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-48486" event={"ID":"6fb4f746-e541-4bb0-b5d1-96774e7e89c3","Type":"ContainerStarted","Data":"69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f"} Sep 30 00:55:56 crc kubenswrapper[4922]: I0930 00:55:56.015910 4922 generic.go:334] "Generic (PLEG): container finished" podID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerID="69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f" exitCode=0 Sep 30 00:55:56 crc kubenswrapper[4922]: I0930 00:55:56.016024 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-48486" event={"ID":"6fb4f746-e541-4bb0-b5d1-96774e7e89c3","Type":"ContainerDied","Data":"69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f"} Sep 30 00:55:56 crc kubenswrapper[4922]: I0930 00:55:56.422131 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:55:56 crc kubenswrapper[4922]: E0930 00:55:56.422887 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:55:57 crc kubenswrapper[4922]: I0930 00:55:57.030968 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-48486" event={"ID":"6fb4f746-e541-4bb0-b5d1-96774e7e89c3","Type":"ContainerStarted","Data":"4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8"} Sep 30 00:55:57 crc kubenswrapper[4922]: I0930 00:55:57.056972 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-48486" podStartSLOduration=2.612544511 podStartE2EDuration="6.056956074s" podCreationTimestamp="2025-09-30 00:55:51 +0000 UTC" firstStartedPulling="2025-09-30 00:55:52.97528188 +0000 UTC m=+8957.285570723" lastFinishedPulling="2025-09-30 00:55:56.419693483 +0000 UTC m=+8960.729982286" observedRunningTime="2025-09-30 00:55:57.049720266 +0000 UTC m=+8961.360009099" watchObservedRunningTime="2025-09-30 00:55:57.056956074 +0000 UTC m=+8961.367244887" Sep 30 00:56:01 crc kubenswrapper[4922]: I0930 00:56:01.779372 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:56:01 crc kubenswrapper[4922]: I0930 00:56:01.780076 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:56:01 crc kubenswrapper[4922]: I0930 00:56:01.842629 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:56:02 crc kubenswrapper[4922]: I0930 00:56:02.198793 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:56:02 crc kubenswrapper[4922]: I0930 00:56:02.261833 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-48486"] Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.125384 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-48486" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="registry-server" containerID="cri-o://4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8" gracePeriod=2 Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.719729 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.819364 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85prh\" (UniqueName: \"kubernetes.io/projected/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-kube-api-access-85prh\") pod \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.819508 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-utilities\") pod \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.819539 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-catalog-content\") pod \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\" (UID: \"6fb4f746-e541-4bb0-b5d1-96774e7e89c3\") " Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.821002 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-utilities" (OuterVolumeSpecName: "utilities") pod "6fb4f746-e541-4bb0-b5d1-96774e7e89c3" (UID: "6fb4f746-e541-4bb0-b5d1-96774e7e89c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.826155 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-kube-api-access-85prh" (OuterVolumeSpecName: "kube-api-access-85prh") pod "6fb4f746-e541-4bb0-b5d1-96774e7e89c3" (UID: "6fb4f746-e541-4bb0-b5d1-96774e7e89c3"). InnerVolumeSpecName "kube-api-access-85prh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.862825 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6fb4f746-e541-4bb0-b5d1-96774e7e89c3" (UID: "6fb4f746-e541-4bb0-b5d1-96774e7e89c3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.922351 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85prh\" (UniqueName: \"kubernetes.io/projected/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-kube-api-access-85prh\") on node \"crc\" DevicePath \"\"" Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.922422 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:56:04 crc kubenswrapper[4922]: I0930 00:56:04.922446 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fb4f746-e541-4bb0-b5d1-96774e7e89c3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.137859 4922 generic.go:334] "Generic (PLEG): container finished" podID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerID="4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8" exitCode=0 Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.137904 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-48486" event={"ID":"6fb4f746-e541-4bb0-b5d1-96774e7e89c3","Type":"ContainerDied","Data":"4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8"} Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.137939 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-48486" event={"ID":"6fb4f746-e541-4bb0-b5d1-96774e7e89c3","Type":"ContainerDied","Data":"1d00b118717d32b1d12308621cc79a4814587ba333d6e3172f54560389564e9a"} Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.137959 4922 scope.go:117] "RemoveContainer" containerID="4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.137991 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-48486" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.182151 4922 scope.go:117] "RemoveContainer" containerID="69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.201141 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-48486"] Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.210027 4922 scope.go:117] "RemoveContainer" containerID="3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.210548 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-48486"] Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.271811 4922 scope.go:117] "RemoveContainer" containerID="4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8" Sep 30 00:56:05 crc kubenswrapper[4922]: E0930 00:56:05.272833 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8\": container with ID starting with 4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8 not found: ID does not exist" containerID="4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.272940 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8"} err="failed to get container status \"4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8\": rpc error: code = NotFound desc = could not find container \"4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8\": container with ID starting with 4f4db82cd956438963a46f23e0f008b24ddc732799b2b9e650b5eeed32a7a6c8 not found: ID does not exist" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.273021 4922 scope.go:117] "RemoveContainer" containerID="69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f" Sep 30 00:56:05 crc kubenswrapper[4922]: E0930 00:56:05.273716 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f\": container with ID starting with 69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f not found: ID does not exist" containerID="69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.274000 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f"} err="failed to get container status \"69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f\": rpc error: code = NotFound desc = could not find container \"69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f\": container with ID starting with 69a8bca56d4a57b45e004b816a40cb1e962e11f2975e09f0cb2f5b401db6736f not found: ID does not exist" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.274246 4922 scope.go:117] "RemoveContainer" containerID="3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a" Sep 30 00:56:05 crc kubenswrapper[4922]: E0930 00:56:05.275042 4922 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a\": container with ID starting with 3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a not found: ID does not exist" containerID="3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a" Sep 30 00:56:05 crc kubenswrapper[4922]: I0930 00:56:05.275093 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a"} err="failed to get container status \"3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a\": rpc error: code = NotFound desc = could not find container \"3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a\": container with ID starting with 3ea175f07de69abceab91d537852d7809d24e1e30554ccab6a8bff80d486ab0a not found: ID does not exist" Sep 30 00:56:06 crc kubenswrapper[4922]: I0930 00:56:06.444279 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" path="/var/lib/kubelet/pods/6fb4f746-e541-4bb0-b5d1-96774e7e89c3/volumes" Sep 30 00:56:08 crc kubenswrapper[4922]: I0930 00:56:08.422297 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:56:08 crc kubenswrapper[4922]: E0930 00:56:08.423052 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:56:23 crc kubenswrapper[4922]: I0930 00:56:23.422343 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:56:23 crc kubenswrapper[4922]: E0930 00:56:23.423519 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:56:38 crc kubenswrapper[4922]: I0930 00:56:38.422079 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:56:38 crc kubenswrapper[4922]: E0930 00:56:38.422924 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:56:51 crc kubenswrapper[4922]: I0930 00:56:51.422307 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:56:51 crc kubenswrapper[4922]: E0930 00:56:51.423088 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:57:03 crc kubenswrapper[4922]: I0930 00:57:03.875746 4922 generic.go:334] "Generic (PLEG): container finished" podID="8b134d4f-b9e7-4a35-9214-ef18899dba9a" containerID="48393a29a705bb0c6f32df03c3dc70c12b11516d00e57992b0877637e605ad0b" exitCode=0 Sep 30 00:57:03 crc kubenswrapper[4922]: I0930 00:57:03.875831 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" event={"ID":"8b134d4f-b9e7-4a35-9214-ef18899dba9a","Type":"ContainerDied","Data":"48393a29a705bb0c6f32df03c3dc70c12b11516d00e57992b0877637e605ad0b"} Sep 30 00:57:04 crc kubenswrapper[4922]: I0930 00:57:04.421954 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:57:04 crc kubenswrapper[4922]: E0930 00:57:04.422278 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.453890 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.482852 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ceph\") pod \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.482977 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nh6s\" (UniqueName: \"kubernetes.io/projected/8b134d4f-b9e7-4a35-9214-ef18899dba9a-kube-api-access-5nh6s\") pod \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.483032 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ssh-key\") pod \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.483193 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-combined-ca-bundle\") pod \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.483371 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-inventory\") pod \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\" (UID: 
\"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.483433 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-agent-neutron-config-0\") pod \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\" (UID: \"8b134d4f-b9e7-4a35-9214-ef18899dba9a\") " Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.489948 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "8b134d4f-b9e7-4a35-9214-ef18899dba9a" (UID: "8b134d4f-b9e7-4a35-9214-ef18899dba9a"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.490322 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b134d4f-b9e7-4a35-9214-ef18899dba9a-kube-api-access-5nh6s" (OuterVolumeSpecName: "kube-api-access-5nh6s") pod "8b134d4f-b9e7-4a35-9214-ef18899dba9a" (UID: "8b134d4f-b9e7-4a35-9214-ef18899dba9a"). InnerVolumeSpecName "kube-api-access-5nh6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.510103 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ceph" (OuterVolumeSpecName: "ceph") pod "8b134d4f-b9e7-4a35-9214-ef18899dba9a" (UID: "8b134d4f-b9e7-4a35-9214-ef18899dba9a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.530687 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8b134d4f-b9e7-4a35-9214-ef18899dba9a" (UID: "8b134d4f-b9e7-4a35-9214-ef18899dba9a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.530935 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "8b134d4f-b9e7-4a35-9214-ef18899dba9a" (UID: "8b134d4f-b9e7-4a35-9214-ef18899dba9a"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.560874 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-inventory" (OuterVolumeSpecName: "inventory") pod "8b134d4f-b9e7-4a35-9214-ef18899dba9a" (UID: "8b134d4f-b9e7-4a35-9214-ef18899dba9a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.587120 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.587168 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.587192 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.587210 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nh6s\" (UniqueName: \"kubernetes.io/projected/8b134d4f-b9e7-4a35-9214-ef18899dba9a-kube-api-access-5nh6s\") on node \"crc\" DevicePath \"\"" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.587240 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.587257 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b134d4f-b9e7-4a35-9214-ef18899dba9a-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.909066 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" event={"ID":"8b134d4f-b9e7-4a35-9214-ef18899dba9a","Type":"ContainerDied","Data":"9b57ca7afe618464b1561c88495f78d34ee49b4a0f462a5cf95164be8693a668"} Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.909109 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b57ca7afe618464b1561c88495f78d34ee49b4a0f462a5cf95164be8693a668" Sep 30 00:57:05 crc kubenswrapper[4922]: I0930 00:57:05.909157 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-8vwvj" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.001985 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v"] Sep 30 00:57:06 crc kubenswrapper[4922]: E0930 00:57:06.002524 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="registry-server" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.002543 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="registry-server" Sep 30 00:57:06 crc kubenswrapper[4922]: E0930 00:57:06.002560 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b134d4f-b9e7-4a35-9214-ef18899dba9a" containerName="neutron-sriov-openstack-openstack-cell1" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.002568 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b134d4f-b9e7-4a35-9214-ef18899dba9a" containerName="neutron-sriov-openstack-openstack-cell1" Sep 30 00:57:06 crc kubenswrapper[4922]: E0930 00:57:06.002610 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="extract-content" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.002620 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="extract-content" Sep 30 00:57:06 crc kubenswrapper[4922]: E0930 00:57:06.002635 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="extract-utilities" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.002643 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="extract-utilities" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.002851 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fb4f746-e541-4bb0-b5d1-96774e7e89c3" containerName="registry-server" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.002863 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b134d4f-b9e7-4a35-9214-ef18899dba9a" containerName="neutron-sriov-openstack-openstack-cell1" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.003826 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.011072 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.011113 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.011573 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.011762 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.011870 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.053709 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v"] Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.107590 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkrdw\" (UniqueName: \"kubernetes.io/projected/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-kube-api-access-mkrdw\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.107659 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.108142 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.108291 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.108639 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.108763 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.211162 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.211278 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.211302 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.211340 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkrdw\" (UniqueName: \"kubernetes.io/projected/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-kube-api-access-mkrdw\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.211363 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.211453 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.217075 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.218208 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-agent-neutron-config-0\") pod 
\"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.218328 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.218579 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.219287 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.246474 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkrdw\" (UniqueName: \"kubernetes.io/projected/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-kube-api-access-mkrdw\") pod \"neutron-dhcp-openstack-openstack-cell1-9tk7v\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.328133 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 00:57:06 crc kubenswrapper[4922]: I0930 00:57:06.951559 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v"] Sep 30 00:57:07 crc kubenswrapper[4922]: I0930 00:57:07.937763 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" event={"ID":"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a","Type":"ContainerStarted","Data":"c759eec3fed87debe4616683875a358d0cdff5b59c9ae6751ab86c1f37d8b0a9"} Sep 30 00:57:07 crc kubenswrapper[4922]: I0930 00:57:07.938118 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" event={"ID":"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a","Type":"ContainerStarted","Data":"0bb20f5c2bbf82081e00bf63423966fa2cab8e75f282fd028c3984d9c8d11d28"} Sep 30 00:57:07 crc kubenswrapper[4922]: I0930 00:57:07.965859 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" podStartSLOduration=2.799673333 podStartE2EDuration="2.965829719s" podCreationTimestamp="2025-09-30 00:57:05 +0000 UTC" firstStartedPulling="2025-09-30 00:57:06.956969044 +0000 UTC m=+9031.267257857" lastFinishedPulling="2025-09-30 00:57:07.12312543 +0000 UTC m=+9031.433414243" observedRunningTime="2025-09-30 00:57:07.952999972 +0000 UTC m=+9032.263288815" watchObservedRunningTime="2025-09-30 00:57:07.965829719 +0000 UTC m=+9032.276118562" Sep 30 00:57:18 crc kubenswrapper[4922]: I0930 00:57:18.422544 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:57:18 crc kubenswrapper[4922]: E0930 00:57:18.423148 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:57:31 crc kubenswrapper[4922]: I0930 00:57:31.422338 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:57:31 crc kubenswrapper[4922]: E0930 00:57:31.423795 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:57:44 crc kubenswrapper[4922]: I0930 00:57:44.422342 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:57:44 crc kubenswrapper[4922]: E0930 00:57:44.423610 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" 
podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:57:58 crc kubenswrapper[4922]: I0930 00:57:58.421834 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:57:58 crc kubenswrapper[4922]: E0930 00:57:58.422627 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.422849 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:58:09 crc kubenswrapper[4922]: E0930 00:58:09.424053 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.672491 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-shdpt"] Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.674746 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.687039 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-shdpt"] Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.757653 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-utilities\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.757930 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmppx\" (UniqueName: \"kubernetes.io/projected/e3361878-06d3-4803-a8de-74a27caf9778-kube-api-access-rmppx\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.758323 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-catalog-content\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.861234 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-utilities\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc 
kubenswrapper[4922]: I0930 00:58:09.861363 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmppx\" (UniqueName: \"kubernetes.io/projected/e3361878-06d3-4803-a8de-74a27caf9778-kube-api-access-rmppx\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.861462 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-catalog-content\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.862253 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-catalog-content\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.862407 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-utilities\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:09 crc kubenswrapper[4922]: I0930 00:58:09.909000 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmppx\" (UniqueName: \"kubernetes.io/projected/e3361878-06d3-4803-a8de-74a27caf9778-kube-api-access-rmppx\") pod \"redhat-operators-shdpt\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:10 crc kubenswrapper[4922]: I0930 00:58:10.015779 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:10 crc kubenswrapper[4922]: I0930 00:58:10.533656 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-shdpt"] Sep 30 00:58:10 crc kubenswrapper[4922]: I0930 00:58:10.739564 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shdpt" event={"ID":"e3361878-06d3-4803-a8de-74a27caf9778","Type":"ContainerStarted","Data":"45f9476a9baacaf71c227e9cb1488fe3a028c01d539e5125b0ec163dd01cb346"} Sep 30 00:58:11 crc kubenswrapper[4922]: I0930 00:58:11.753534 4922 generic.go:334] "Generic (PLEG): container finished" podID="e3361878-06d3-4803-a8de-74a27caf9778" containerID="e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe" exitCode=0 Sep 30 00:58:11 crc kubenswrapper[4922]: I0930 00:58:11.753655 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shdpt" event={"ID":"e3361878-06d3-4803-a8de-74a27caf9778","Type":"ContainerDied","Data":"e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe"} Sep 30 00:58:11 crc kubenswrapper[4922]: I0930 00:58:11.756525 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 00:58:12 crc kubenswrapper[4922]: I0930 00:58:12.766797 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shdpt" event={"ID":"e3361878-06d3-4803-a8de-74a27caf9778","Type":"ContainerStarted","Data":"24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3"} Sep 30 00:58:16 crc kubenswrapper[4922]: I0930 00:58:16.818414 4922 generic.go:334] "Generic (PLEG): container finished" podID="e3361878-06d3-4803-a8de-74a27caf9778" containerID="24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3" exitCode=0 Sep 30 00:58:16 crc kubenswrapper[4922]: I0930 00:58:16.818525 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shdpt" event={"ID":"e3361878-06d3-4803-a8de-74a27caf9778","Type":"ContainerDied","Data":"24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3"} Sep 30 00:58:17 crc kubenswrapper[4922]: I0930 00:58:17.833661 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shdpt" event={"ID":"e3361878-06d3-4803-a8de-74a27caf9778","Type":"ContainerStarted","Data":"1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d"} Sep 30 00:58:17 crc kubenswrapper[4922]: I0930 00:58:17.871663 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-shdpt" podStartSLOduration=3.297271639 podStartE2EDuration="8.871632928s" podCreationTimestamp="2025-09-30 00:58:09 +0000 UTC" firstStartedPulling="2025-09-30 00:58:11.756215436 +0000 UTC m=+9096.066504249" lastFinishedPulling="2025-09-30 00:58:17.330576705 +0000 UTC m=+9101.640865538" observedRunningTime="2025-09-30 00:58:17.853002847 +0000 UTC m=+9102.163291700" watchObservedRunningTime="2025-09-30 00:58:17.871632928 +0000 UTC m=+9102.181921781" Sep 30 00:58:20 crc kubenswrapper[4922]: I0930 00:58:20.016264 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:20 crc kubenswrapper[4922]: I0930 00:58:20.018005 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:21 crc 
kubenswrapper[4922]: I0930 00:58:21.083327 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-shdpt" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="registry-server" probeResult="failure" output=< Sep 30 00:58:21 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 00:58:21 crc kubenswrapper[4922]: > Sep 30 00:58:23 crc kubenswrapper[4922]: I0930 00:58:23.423033 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:58:23 crc kubenswrapper[4922]: E0930 00:58:23.424144 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:58:30 crc kubenswrapper[4922]: I0930 00:58:30.109286 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:30 crc kubenswrapper[4922]: I0930 00:58:30.203148 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:30 crc kubenswrapper[4922]: I0930 00:58:30.364375 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-shdpt"] Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.011449 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-shdpt" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="registry-server" containerID="cri-o://1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d" gracePeriod=2 Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.558112 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.689890 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmppx\" (UniqueName: \"kubernetes.io/projected/e3361878-06d3-4803-a8de-74a27caf9778-kube-api-access-rmppx\") pod \"e3361878-06d3-4803-a8de-74a27caf9778\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.690527 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-catalog-content\") pod \"e3361878-06d3-4803-a8de-74a27caf9778\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.690685 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-utilities\") pod \"e3361878-06d3-4803-a8de-74a27caf9778\" (UID: \"e3361878-06d3-4803-a8de-74a27caf9778\") " Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.691373 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-utilities" (OuterVolumeSpecName: "utilities") pod "e3361878-06d3-4803-a8de-74a27caf9778" (UID: "e3361878-06d3-4803-a8de-74a27caf9778"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.691547 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.696951 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3361878-06d3-4803-a8de-74a27caf9778-kube-api-access-rmppx" (OuterVolumeSpecName: "kube-api-access-rmppx") pod "e3361878-06d3-4803-a8de-74a27caf9778" (UID: "e3361878-06d3-4803-a8de-74a27caf9778"). InnerVolumeSpecName "kube-api-access-rmppx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.793289 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmppx\" (UniqueName: \"kubernetes.io/projected/e3361878-06d3-4803-a8de-74a27caf9778-kube-api-access-rmppx\") on node \"crc\" DevicePath \"\"" Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.813076 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e3361878-06d3-4803-a8de-74a27caf9778" (UID: "e3361878-06d3-4803-a8de-74a27caf9778"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 00:58:32 crc kubenswrapper[4922]: I0930 00:58:32.897097 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e3361878-06d3-4803-a8de-74a27caf9778-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.028329 4922 generic.go:334] "Generic (PLEG): container finished" podID="e3361878-06d3-4803-a8de-74a27caf9778" containerID="1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d" exitCode=0 Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.028374 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shdpt" event={"ID":"e3361878-06d3-4803-a8de-74a27caf9778","Type":"ContainerDied","Data":"1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d"} Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.028474 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-shdpt" event={"ID":"e3361878-06d3-4803-a8de-74a27caf9778","Type":"ContainerDied","Data":"45f9476a9baacaf71c227e9cb1488fe3a028c01d539e5125b0ec163dd01cb346"} Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.028510 4922 scope.go:117] "RemoveContainer" containerID="1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.028785 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-shdpt" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.061379 4922 scope.go:117] "RemoveContainer" containerID="24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.083271 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-shdpt"] Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.092761 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-shdpt"] Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.096475 4922 scope.go:117] "RemoveContainer" containerID="e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.173769 4922 scope.go:117] "RemoveContainer" containerID="1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d" Sep 30 00:58:33 crc kubenswrapper[4922]: E0930 00:58:33.174624 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d\": container with ID starting with 1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d not found: ID does not exist" containerID="1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.174654 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d"} err="failed to get container status \"1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d\": rpc error: code = NotFound desc = could not find container \"1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d\": container with ID starting with 1744b333b4160559c763c9bae8ab725563626c8531c4ee3f8a793d302a5b630d not found: ID does not exist" Sep 30 00:58:33 crc 
kubenswrapper[4922]: I0930 00:58:33.174678 4922 scope.go:117] "RemoveContainer" containerID="24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3" Sep 30 00:58:33 crc kubenswrapper[4922]: E0930 00:58:33.175094 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3\": container with ID starting with 24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3 not found: ID does not exist" containerID="24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.175121 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3"} err="failed to get container status \"24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3\": rpc error: code = NotFound desc = could not find container \"24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3\": container with ID starting with 24522279dae69646cee401c9ccb7018a446e173073ff4c0429b9b9684ab3b7e3 not found: ID does not exist" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.175140 4922 scope.go:117] "RemoveContainer" containerID="e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe" Sep 30 00:58:33 crc kubenswrapper[4922]: E0930 00:58:33.175502 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe\": container with ID starting with e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe not found: ID does not exist" containerID="e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe" Sep 30 00:58:33 crc kubenswrapper[4922]: I0930 00:58:33.175524 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe"} err="failed to get container status \"e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe\": rpc error: code = NotFound desc = could not find container \"e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe\": container with ID starting with e16949aaa54515bb9849a625538d038a4beca95d1c5b253c9f613decdcca8dbe not found: ID does not exist" Sep 30 00:58:34 crc kubenswrapper[4922]: I0930 00:58:34.443920 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3361878-06d3-4803-a8de-74a27caf9778" path="/var/lib/kubelet/pods/e3361878-06d3-4803-a8de-74a27caf9778/volumes" Sep 30 00:58:38 crc kubenswrapper[4922]: I0930 00:58:38.421734 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:58:38 crc kubenswrapper[4922]: E0930 00:58:38.422755 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:58:50 crc kubenswrapper[4922]: I0930 00:58:50.422825 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" 
Sep 30 00:58:50 crc kubenswrapper[4922]: E0930 00:58:50.423856 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:59:03 crc kubenswrapper[4922]: I0930 00:59:03.422709 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:59:03 crc kubenswrapper[4922]: E0930 00:59:03.423935 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:59:14 crc kubenswrapper[4922]: I0930 00:59:14.422732 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:59:14 crc kubenswrapper[4922]: E0930 00:59:14.423916 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 00:59:29 crc kubenswrapper[4922]: I0930 00:59:29.422498 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 00:59:29 crc kubenswrapper[4922]: I0930 00:59:29.690931 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"6686e73c3e2aba1d57d05c28c5f8c044d54fdff0633d242879d8a8c88e13fb85"} Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.182692 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s"] Sep 30 01:00:00 crc kubenswrapper[4922]: E0930 01:00:00.184024 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="extract-content" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.184047 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="extract-content" Sep 30 01:00:00 crc kubenswrapper[4922]: E0930 01:00:00.184078 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="extract-utilities" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.184090 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="extract-utilities" Sep 30 01:00:00 crc kubenswrapper[4922]: E0930 01:00:00.184116 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="registry-server" Sep 30 
01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.184130 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="registry-server" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.184544 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3361878-06d3-4803-a8de-74a27caf9778" containerName="registry-server" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.186003 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.189048 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.189493 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.196418 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s"] Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.335768 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-config-volume\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.335827 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-secret-volume\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.335981 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfhdm\" (UniqueName: \"kubernetes.io/projected/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-kube-api-access-tfhdm\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.438305 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfhdm\" (UniqueName: \"kubernetes.io/projected/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-kube-api-access-tfhdm\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.438468 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-config-volume\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.438515 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" 
(UniqueName: \"kubernetes.io/secret/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-secret-volume\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.439912 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-config-volume\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.445568 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-secret-volume\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.457117 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfhdm\" (UniqueName: \"kubernetes.io/projected/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-kube-api-access-tfhdm\") pod \"collect-profiles-29319900-dq56s\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:00 crc kubenswrapper[4922]: I0930 01:00:00.517269 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:01 crc kubenswrapper[4922]: I0930 01:00:01.020090 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s"] Sep 30 01:00:01 crc kubenswrapper[4922]: I0930 01:00:01.079592 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" event={"ID":"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722","Type":"ContainerStarted","Data":"cfc41663de467a173b6228a4b710810d4ceae860b2bf1408e8b3a07f3292eab1"} Sep 30 01:00:02 crc kubenswrapper[4922]: I0930 01:00:02.091587 4922 generic.go:334] "Generic (PLEG): container finished" podID="1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722" containerID="78462bd5aeec86f77d3571155dfe0d38cb1472f4c95758f9d6f27c6b4c94c885" exitCode=0 Sep 30 01:00:02 crc kubenswrapper[4922]: I0930 01:00:02.091668 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" event={"ID":"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722","Type":"ContainerDied","Data":"78462bd5aeec86f77d3571155dfe0d38cb1472f4c95758f9d6f27c6b4c94c885"} Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.597047 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.717003 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-config-volume\") pod \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.717232 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfhdm\" (UniqueName: \"kubernetes.io/projected/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-kube-api-access-tfhdm\") pod \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.717302 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-secret-volume\") pod \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\" (UID: \"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722\") " Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.717805 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-config-volume" (OuterVolumeSpecName: "config-volume") pod "1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722" (UID: "1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.725613 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722" (UID: "1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.725760 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-kube-api-access-tfhdm" (OuterVolumeSpecName: "kube-api-access-tfhdm") pod "1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722" (UID: "1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722"). InnerVolumeSpecName "kube-api-access-tfhdm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.819616 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.819646 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfhdm\" (UniqueName: \"kubernetes.io/projected/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-kube-api-access-tfhdm\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:03 crc kubenswrapper[4922]: I0930 01:00:03.819658 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:00:04 crc kubenswrapper[4922]: I0930 01:00:04.119143 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" event={"ID":"1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722","Type":"ContainerDied","Data":"cfc41663de467a173b6228a4b710810d4ceae860b2bf1408e8b3a07f3292eab1"} Sep 30 01:00:04 crc kubenswrapper[4922]: I0930 01:00:04.119195 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cfc41663de467a173b6228a4b710810d4ceae860b2bf1408e8b3a07f3292eab1" Sep 30 01:00:04 crc kubenswrapper[4922]: I0930 01:00:04.119235 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319900-dq56s" Sep 30 01:00:04 crc kubenswrapper[4922]: I0930 01:00:04.692304 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh"] Sep 30 01:00:04 crc kubenswrapper[4922]: I0930 01:00:04.706075 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319855-884bh"] Sep 30 01:00:06 crc kubenswrapper[4922]: I0930 01:00:06.436253 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34125185-0edb-4028-b601-e85f01414b4d" path="/var/lib/kubelet/pods/34125185-0edb-4028-b601-e85f01414b4d/volumes" Sep 30 01:00:59 crc kubenswrapper[4922]: I0930 01:00:59.691916 4922 scope.go:117] "RemoveContainer" containerID="4b437d7fb287ae27f38e87ee7192722a71089302a3d18b4868e8a5994e35c826" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.184958 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319901-ddsjv"] Sep 30 01:01:00 crc kubenswrapper[4922]: E0930 01:01:00.185836 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722" containerName="collect-profiles" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.185852 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722" containerName="collect-profiles" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.186173 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cd2f42d-05c5-4d2a-a5ab-5c3b1c6ab722" containerName="collect-profiles" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.187299 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.219854 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319901-ddsjv"] Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.273841 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-combined-ca-bundle\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.274150 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt2qk\" (UniqueName: \"kubernetes.io/projected/a465ff5e-6650-4b99-b0b7-d6986c555992-kube-api-access-tt2qk\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.274974 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-config-data\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.275046 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-fernet-keys\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.377371 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-config-data\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.377444 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-fernet-keys\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.377504 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-combined-ca-bundle\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.377578 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt2qk\" (UniqueName: \"kubernetes.io/projected/a465ff5e-6650-4b99-b0b7-d6986c555992-kube-api-access-tt2qk\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.384368 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-config-data\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.384478 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-fernet-keys\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.387178 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-combined-ca-bundle\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.401148 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt2qk\" (UniqueName: \"kubernetes.io/projected/a465ff5e-6650-4b99-b0b7-d6986c555992-kube-api-access-tt2qk\") pod \"keystone-cron-29319901-ddsjv\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:00 crc kubenswrapper[4922]: I0930 01:01:00.523674 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:01 crc kubenswrapper[4922]: I0930 01:01:01.010209 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319901-ddsjv"] Sep 30 01:01:01 crc kubenswrapper[4922]: I0930 01:01:01.959824 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-ddsjv" event={"ID":"a465ff5e-6650-4b99-b0b7-d6986c555992","Type":"ContainerStarted","Data":"49c746461371a5c0cb0fc959c77ba23d911b5e68f53034e2541e7df88e9a7e53"} Sep 30 01:01:01 crc kubenswrapper[4922]: I0930 01:01:01.960600 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-ddsjv" event={"ID":"a465ff5e-6650-4b99-b0b7-d6986c555992","Type":"ContainerStarted","Data":"881da461762b313e7af5f4f9a19ca7a0f6a21c84a5a9c4b631faedb44b70c917"} Sep 30 01:01:01 crc kubenswrapper[4922]: I0930 01:01:01.994165 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319901-ddsjv" podStartSLOduration=1.994134561 podStartE2EDuration="1.994134561s" podCreationTimestamp="2025-09-30 01:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:01:01.980035213 +0000 UTC m=+9266.290324296" watchObservedRunningTime="2025-09-30 01:01:01.994134561 +0000 UTC m=+9266.304423414" Sep 30 01:01:03 crc kubenswrapper[4922]: I0930 01:01:03.995929 4922 generic.go:334] "Generic (PLEG): container finished" podID="a465ff5e-6650-4b99-b0b7-d6986c555992" containerID="49c746461371a5c0cb0fc959c77ba23d911b5e68f53034e2541e7df88e9a7e53" exitCode=0 Sep 30 01:01:03 crc kubenswrapper[4922]: I0930 01:01:03.996060 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-ddsjv" event={"ID":"a465ff5e-6650-4b99-b0b7-d6986c555992","Type":"ContainerDied","Data":"49c746461371a5c0cb0fc959c77ba23d911b5e68f53034e2541e7df88e9a7e53"} Sep 30 01:01:06 crc kubenswrapper[4922]: 
I0930 01:01:06.284971 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.325598 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-combined-ca-bundle\") pod \"a465ff5e-6650-4b99-b0b7-d6986c555992\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.325702 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-fernet-keys\") pod \"a465ff5e-6650-4b99-b0b7-d6986c555992\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.325781 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-config-data\") pod \"a465ff5e-6650-4b99-b0b7-d6986c555992\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.325935 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt2qk\" (UniqueName: \"kubernetes.io/projected/a465ff5e-6650-4b99-b0b7-d6986c555992-kube-api-access-tt2qk\") pod \"a465ff5e-6650-4b99-b0b7-d6986c555992\" (UID: \"a465ff5e-6650-4b99-b0b7-d6986c555992\") " Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.335485 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a465ff5e-6650-4b99-b0b7-d6986c555992" (UID: "a465ff5e-6650-4b99-b0b7-d6986c555992"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.335543 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a465ff5e-6650-4b99-b0b7-d6986c555992-kube-api-access-tt2qk" (OuterVolumeSpecName: "kube-api-access-tt2qk") pod "a465ff5e-6650-4b99-b0b7-d6986c555992" (UID: "a465ff5e-6650-4b99-b0b7-d6986c555992"). InnerVolumeSpecName "kube-api-access-tt2qk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.357801 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a465ff5e-6650-4b99-b0b7-d6986c555992" (UID: "a465ff5e-6650-4b99-b0b7-d6986c555992"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.423282 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-config-data" (OuterVolumeSpecName: "config-data") pod "a465ff5e-6650-4b99-b0b7-d6986c555992" (UID: "a465ff5e-6650-4b99-b0b7-d6986c555992"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.428212 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.428247 4922 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.428259 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a465ff5e-6650-4b99-b0b7-d6986c555992-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:06 crc kubenswrapper[4922]: I0930 01:01:06.428270 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt2qk\" (UniqueName: \"kubernetes.io/projected/a465ff5e-6650-4b99-b0b7-d6986c555992-kube-api-access-tt2qk\") on node \"crc\" DevicePath \"\"" Sep 30 01:01:07 crc kubenswrapper[4922]: I0930 01:01:07.042195 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319901-ddsjv" event={"ID":"a465ff5e-6650-4b99-b0b7-d6986c555992","Type":"ContainerDied","Data":"881da461762b313e7af5f4f9a19ca7a0f6a21c84a5a9c4b631faedb44b70c917"} Sep 30 01:01:07 crc kubenswrapper[4922]: I0930 01:01:07.042329 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319901-ddsjv" Sep 30 01:01:07 crc kubenswrapper[4922]: I0930 01:01:07.042335 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="881da461762b313e7af5f4f9a19ca7a0f6a21c84a5a9c4b631faedb44b70c917" Sep 30 01:01:58 crc kubenswrapper[4922]: I0930 01:01:58.913032 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:01:58 crc kubenswrapper[4922]: I0930 01:01:58.913816 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:02:11 crc kubenswrapper[4922]: I0930 01:02:11.924514 4922 generic.go:334] "Generic (PLEG): container finished" podID="d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" containerID="c759eec3fed87debe4616683875a358d0cdff5b59c9ae6751ab86c1f37d8b0a9" exitCode=0 Sep 30 01:02:11 crc kubenswrapper[4922]: I0930 01:02:11.924591 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" event={"ID":"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a","Type":"ContainerDied","Data":"c759eec3fed87debe4616683875a358d0cdff5b59c9ae6751ab86c1f37d8b0a9"} Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.518680 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.655640 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-agent-neutron-config-0\") pod \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.656061 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkrdw\" (UniqueName: \"kubernetes.io/projected/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-kube-api-access-mkrdw\") pod \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.656279 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-combined-ca-bundle\") pod \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.656492 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ceph\") pod \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.656895 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ssh-key\") pod \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.657057 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-inventory\") pod \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\" (UID: \"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a\") " Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.662479 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-kube-api-access-mkrdw" (OuterVolumeSpecName: "kube-api-access-mkrdw") pod "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" (UID: "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a"). InnerVolumeSpecName "kube-api-access-mkrdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.665084 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ceph" (OuterVolumeSpecName: "ceph") pod "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" (UID: "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.668037 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" (UID: "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.689571 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" (UID: "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.711274 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" (UID: "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.715457 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-inventory" (OuterVolumeSpecName: "inventory") pod "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" (UID: "d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.759786 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkrdw\" (UniqueName: \"kubernetes.io/projected/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-kube-api-access-mkrdw\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.759832 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.759850 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.759862 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.759876 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.759890 4922 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.950570 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" event={"ID":"d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a","Type":"ContainerDied","Data":"0bb20f5c2bbf82081e00bf63423966fa2cab8e75f282fd028c3984d9c8d11d28"} Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.950823 4922 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="0bb20f5c2bbf82081e00bf63423966fa2cab8e75f282fd028c3984d9c8d11d28" Sep 30 01:02:13 crc kubenswrapper[4922]: I0930 01:02:13.951018 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-9tk7v" Sep 30 01:02:24 crc kubenswrapper[4922]: I0930 01:02:24.239792 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 01:02:24 crc kubenswrapper[4922]: I0930 01:02:24.240473 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="ab906156-68b3-4477-aff3-05cba9fe664f" containerName="nova-cell0-conductor-conductor" containerID="cri-o://86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" gracePeriod=30 Sep 30 01:02:24 crc kubenswrapper[4922]: I0930 01:02:24.270455 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 01:02:24 crc kubenswrapper[4922]: I0930 01:02:24.270702 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="3814c531-0b90-4d50-bc72-f9c990eeee7e" containerName="nova-cell1-conductor-conductor" containerID="cri-o://9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72" gracePeriod=30 Sep 30 01:02:24 crc kubenswrapper[4922]: E0930 01:02:24.683903 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 01:02:24 crc kubenswrapper[4922]: E0930 01:02:24.685181 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 01:02:24 crc kubenswrapper[4922]: E0930 01:02:24.686626 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 01:02:24 crc kubenswrapper[4922]: E0930 01:02:24.686656 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="ab906156-68b3-4477-aff3-05cba9fe664f" containerName="nova-cell0-conductor-conductor" Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.003724 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.004250 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="fe2075ef-27e4-4e92-84d3-4178fa974985" containerName="nova-scheduler-scheduler" containerID="cri-o://78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" gracePeriod=30 Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.039502 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-metadata-0"] Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.039822 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-log" containerID="cri-o://eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735" gracePeriod=30 Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.039954 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-metadata" containerID="cri-o://99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046" gracePeriod=30 Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.051136 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.052666 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-log" containerID="cri-o://2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60" gracePeriod=30 Sep 30 01:02:26 crc kubenswrapper[4922]: I0930 01:02:26.052759 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-api" containerID="cri-o://99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a" gracePeriod=30 Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.085682 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.137652 4922 generic.go:334] "Generic (PLEG): container finished" podID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerID="2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60" exitCode=143 Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.137745 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b","Type":"ContainerDied","Data":"2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60"} Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.140536 4922 generic.go:334] "Generic (PLEG): container finished" podID="3814c531-0b90-4d50-bc72-f9c990eeee7e" containerID="9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72" exitCode=0 Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.140674 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3814c531-0b90-4d50-bc72-f9c990eeee7e","Type":"ContainerDied","Data":"9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72"} Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.140708 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3814c531-0b90-4d50-bc72-f9c990eeee7e","Type":"ContainerDied","Data":"13670b28740468c37c4a7d926fd783072b23941afaeb3a439d77dc82a021e46e"} Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.140724 4922 scope.go:117] "RemoveContainer" containerID="9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.140871 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.148054 4922 generic.go:334] "Generic (PLEG): container finished" podID="7fdc8509-271e-4995-af80-db635ba06700" containerID="eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735" exitCode=143 Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.148102 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fdc8509-271e-4995-af80-db635ba06700","Type":"ContainerDied","Data":"eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735"} Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.198910 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc98g\" (UniqueName: \"kubernetes.io/projected/3814c531-0b90-4d50-bc72-f9c990eeee7e-kube-api-access-vc98g\") pod \"3814c531-0b90-4d50-bc72-f9c990eeee7e\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.198994 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-combined-ca-bundle\") pod \"3814c531-0b90-4d50-bc72-f9c990eeee7e\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.199373 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-config-data\") pod \"3814c531-0b90-4d50-bc72-f9c990eeee7e\" (UID: \"3814c531-0b90-4d50-bc72-f9c990eeee7e\") " Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.213677 4922 scope.go:117] "RemoveContainer" containerID="9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.213836 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3814c531-0b90-4d50-bc72-f9c990eeee7e-kube-api-access-vc98g" (OuterVolumeSpecName: "kube-api-access-vc98g") pod "3814c531-0b90-4d50-bc72-f9c990eeee7e" (UID: "3814c531-0b90-4d50-bc72-f9c990eeee7e"). InnerVolumeSpecName "kube-api-access-vc98g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:27 crc kubenswrapper[4922]: E0930 01:02:27.214805 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72\": container with ID starting with 9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72 not found: ID does not exist" containerID="9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.215000 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72"} err="failed to get container status \"9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72\": rpc error: code = NotFound desc = could not find container \"9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72\": container with ID starting with 9f80b84b45dce8f7931e3d47192c8bc6a48bf3e58f053a02b9f6bc5674f03b72 not found: ID does not exist" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.239329 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-config-data" (OuterVolumeSpecName: "config-data") pod "3814c531-0b90-4d50-bc72-f9c990eeee7e" (UID: "3814c531-0b90-4d50-bc72-f9c990eeee7e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.267358 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3814c531-0b90-4d50-bc72-f9c990eeee7e" (UID: "3814c531-0b90-4d50-bc72-f9c990eeee7e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.302208 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.302253 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc98g\" (UniqueName: \"kubernetes.io/projected/3814c531-0b90-4d50-bc72-f9c990eeee7e-kube-api-access-vc98g\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.302265 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3814c531-0b90-4d50-bc72-f9c990eeee7e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.478570 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.491925 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.507561 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 01:02:27 crc kubenswrapper[4922]: E0930 01:02:27.508048 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3814c531-0b90-4d50-bc72-f9c990eeee7e" containerName="nova-cell1-conductor-conductor" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.508070 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3814c531-0b90-4d50-bc72-f9c990eeee7e" containerName="nova-cell1-conductor-conductor" Sep 30 01:02:27 crc kubenswrapper[4922]: E0930 01:02:27.508101 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" containerName="neutron-dhcp-openstack-openstack-cell1" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.508109 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" containerName="neutron-dhcp-openstack-openstack-cell1" Sep 30 01:02:27 crc kubenswrapper[4922]: E0930 01:02:27.508122 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a465ff5e-6650-4b99-b0b7-d6986c555992" containerName="keystone-cron" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.508128 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="a465ff5e-6650-4b99-b0b7-d6986c555992" containerName="keystone-cron" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.508370 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a" containerName="neutron-dhcp-openstack-openstack-cell1" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.508411 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="a465ff5e-6650-4b99-b0b7-d6986c555992" containerName="keystone-cron" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.508429 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3814c531-0b90-4d50-bc72-f9c990eeee7e" containerName="nova-cell1-conductor-conductor" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.509246 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.511537 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.519041 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.609282 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.609383 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzwzn\" (UniqueName: \"kubernetes.io/projected/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-kube-api-access-zzwzn\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.609519 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.710880 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.711196 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzwzn\" (UniqueName: \"kubernetes.io/projected/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-kube-api-access-zzwzn\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.711296 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.716757 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.718940 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.735382 4922 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzwzn\" (UniqueName: \"kubernetes.io/projected/3eeae82c-d551-41d2-88c9-35a7dda0ac0a-kube-api-access-zzwzn\") pod \"nova-cell1-conductor-0\" (UID: \"3eeae82c-d551-41d2-88c9-35a7dda0ac0a\") " pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:27 crc kubenswrapper[4922]: I0930 01:02:27.824271 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:28 crc kubenswrapper[4922]: I0930 01:02:28.280743 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 01:02:28 crc kubenswrapper[4922]: I0930 01:02:28.440611 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3814c531-0b90-4d50-bc72-f9c990eeee7e" path="/var/lib/kubelet/pods/3814c531-0b90-4d50-bc72-f9c990eeee7e/volumes" Sep 30 01:02:28 crc kubenswrapper[4922]: I0930 01:02:28.913051 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:02:28 crc kubenswrapper[4922]: I0930 01:02:28.913647 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:02:28 crc kubenswrapper[4922]: W0930 01:02:28.955659 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3eeae82c_d551_41d2_88c9_35a7dda0ac0a.slice/crio-230ac4e3c34f12bb4e1500ebadfe4de219fc57ba1929d37ad993d9ca636b9632 WatchSource:0}: Error finding container 230ac4e3c34f12bb4e1500ebadfe4de219fc57ba1929d37ad993d9ca636b9632: Status 404 returned error can't find the container with id 230ac4e3c34f12bb4e1500ebadfe4de219fc57ba1929d37ad993d9ca636b9632 Sep 30 01:02:29 crc kubenswrapper[4922]: I0930 01:02:29.172659 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3eeae82c-d551-41d2-88c9-35a7dda0ac0a","Type":"ContainerStarted","Data":"230ac4e3c34f12bb4e1500ebadfe4de219fc57ba1929d37ad993d9ca636b9632"} Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.682432 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.689337 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.695069 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.695115 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="ab906156-68b3-4477-aff3-05cba9fe664f" containerName="nova-cell0-conductor-conductor" Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.702630 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.704449 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.712723 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 01:02:29 crc kubenswrapper[4922]: E0930 01:02:29.712778 4922 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="fe2075ef-27e4-4e92-84d3-4178fa974985" containerName="nova-scheduler-scheduler" Sep 30 01:02:29 crc kubenswrapper[4922]: I0930 01:02:29.932952 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 01:02:29 crc kubenswrapper[4922]: I0930 01:02:29.938755 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064467 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b64q5\" (UniqueName: \"kubernetes.io/projected/7fdc8509-271e-4995-af80-db635ba06700-kube-api-access-b64q5\") pod \"7fdc8509-271e-4995-af80-db635ba06700\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064527 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-config-data\") pod \"7fdc8509-271e-4995-af80-db635ba06700\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064553 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-combined-ca-bundle\") pod \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064575 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-config-data\") pod \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064611 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-combined-ca-bundle\") pod \"7fdc8509-271e-4995-af80-db635ba06700\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064630 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fdc8509-271e-4995-af80-db635ba06700-logs\") pod \"7fdc8509-271e-4995-af80-db635ba06700\" (UID: \"7fdc8509-271e-4995-af80-db635ba06700\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064685 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npw4n\" (UniqueName: \"kubernetes.io/projected/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-kube-api-access-npw4n\") pod \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.064741 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-logs\") pod \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\" (UID: \"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b\") " Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.066470 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-logs" (OuterVolumeSpecName: "logs") pod "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" (UID: "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.067236 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fdc8509-271e-4995-af80-db635ba06700-logs" (OuterVolumeSpecName: "logs") pod "7fdc8509-271e-4995-af80-db635ba06700" (UID: "7fdc8509-271e-4995-af80-db635ba06700"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.070847 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fdc8509-271e-4995-af80-db635ba06700-kube-api-access-b64q5" (OuterVolumeSpecName: "kube-api-access-b64q5") pod "7fdc8509-271e-4995-af80-db635ba06700" (UID: "7fdc8509-271e-4995-af80-db635ba06700"). InnerVolumeSpecName "kube-api-access-b64q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.072486 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-kube-api-access-npw4n" (OuterVolumeSpecName: "kube-api-access-npw4n") pod "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" (UID: "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b"). InnerVolumeSpecName "kube-api-access-npw4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.098413 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-config-data" (OuterVolumeSpecName: "config-data") pod "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" (UID: "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.101193 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7fdc8509-271e-4995-af80-db635ba06700" (UID: "7fdc8509-271e-4995-af80-db635ba06700"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.113034 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" (UID: "4fc3758b-b2ad-42b2-bbb0-ea2145ec886b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.113341 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-config-data" (OuterVolumeSpecName: "config-data") pod "7fdc8509-271e-4995-af80-db635ba06700" (UID: "7fdc8509-271e-4995-af80-db635ba06700"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170467 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b64q5\" (UniqueName: \"kubernetes.io/projected/7fdc8509-271e-4995-af80-db635ba06700-kube-api-access-b64q5\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170828 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170843 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170853 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170864 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fdc8509-271e-4995-af80-db635ba06700-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170876 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fdc8509-271e-4995-af80-db635ba06700-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170888 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npw4n\" (UniqueName: \"kubernetes.io/projected/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-kube-api-access-npw4n\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.170901 4922 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b-logs\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.186521 4922 generic.go:334] "Generic (PLEG): container finished" podID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerID="99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a" exitCode=0 Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.186577 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b","Type":"ContainerDied","Data":"99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a"} Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.186603 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4fc3758b-b2ad-42b2-bbb0-ea2145ec886b","Type":"ContainerDied","Data":"22d1cd90b71673ba7282ecaab8aa1102e71686b45095eeee9435c74653fd5fb5"} Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.186619 4922 scope.go:117] "RemoveContainer" containerID="99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.186727 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.204714 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"3eeae82c-d551-41d2-88c9-35a7dda0ac0a","Type":"ContainerStarted","Data":"c2dbf5a27393b81dec8f082733b03b7f29e804f489702d2f0db0c4120a5bb434"} Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.204777 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.215254 4922 generic.go:334] "Generic (PLEG): container finished" podID="7fdc8509-271e-4995-af80-db635ba06700" containerID="99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046" exitCode=0 Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.215300 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fdc8509-271e-4995-af80-db635ba06700","Type":"ContainerDied","Data":"99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046"} Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.215328 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7fdc8509-271e-4995-af80-db635ba06700","Type":"ContainerDied","Data":"60979eb8f2eb13bc22c6c455f6d3b74029107191896063179c88c23f9e8682f0"} Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.215324 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.226909 4922 scope.go:117] "RemoveContainer" containerID="2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.270423 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.270391989 podStartE2EDuration="3.270391989s" podCreationTimestamp="2025-09-30 01:02:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:02:30.223590382 +0000 UTC m=+9354.533879195" watchObservedRunningTime="2025-09-30 01:02:30.270391989 +0000 UTC m=+9354.580680802" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.286507 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308240 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308296 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: E0930 01:02:30.308705 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-metadata" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308717 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-metadata" Sep 30 01:02:30 crc kubenswrapper[4922]: E0930 01:02:30.308732 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-api" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308738 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-api" Sep 30 
01:02:30 crc kubenswrapper[4922]: E0930 01:02:30.308749 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-log" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308754 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-log" Sep 30 01:02:30 crc kubenswrapper[4922]: E0930 01:02:30.308776 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-log" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308782 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-log" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308954 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-log" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308966 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-log" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.308978 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fdc8509-271e-4995-af80-db635ba06700" containerName="nova-metadata-metadata" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.309003 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" containerName="nova-api-api" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.309951 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.310019 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.328296 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.328334 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.329689 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.333332 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.338001 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.338180 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.338295 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.338575 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.338694 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-cznhl" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.338802 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.338942 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.352537 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.372234 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.374090 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.374871 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d744d8-84a2-4a45-8043-8bf0594dde75-config-data\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.374918 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp2nl\" (UniqueName: \"kubernetes.io/projected/51d744d8-84a2-4a45-8043-8bf0594dde75-kube-api-access-qp2nl\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.375111 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51d744d8-84a2-4a45-8043-8bf0594dde75-logs\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.375146 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d744d8-84a2-4a45-8043-8bf0594dde75-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.377245 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.392463 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.407383 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.433202 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fc3758b-b2ad-42b2-bbb0-ea2145ec886b" path="/var/lib/kubelet/pods/4fc3758b-b2ad-42b2-bbb0-ea2145ec886b/volumes" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.435810 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fdc8509-271e-4995-af80-db635ba06700" path="/var/lib/kubelet/pods/7fdc8509-271e-4995-af80-db635ba06700/volumes" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477158 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477218 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc 
kubenswrapper[4922]: I0930 01:02:30.477249 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477267 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ndb4\" (UniqueName: \"kubernetes.io/projected/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-kube-api-access-2ndb4\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477305 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d744d8-84a2-4a45-8043-8bf0594dde75-config-data\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477326 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477356 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp2nl\" (UniqueName: \"kubernetes.io/projected/51d744d8-84a2-4a45-8043-8bf0594dde75-kube-api-access-qp2nl\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477379 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477417 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477441 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c3399b-495f-48e0-aaea-eed2883b5feb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc 
kubenswrapper[4922]: I0930 01:02:30.477456 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477495 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c3399b-495f-48e0-aaea-eed2883b5feb-config-data\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477533 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477563 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51d744d8-84a2-4a45-8043-8bf0594dde75-logs\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477576 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d744d8-84a2-4a45-8043-8bf0594dde75-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477610 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7c3399b-495f-48e0-aaea-eed2883b5feb-logs\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477642 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477686 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.477715 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-8blgl\" (UniqueName: \"kubernetes.io/projected/d7c3399b-495f-48e0-aaea-eed2883b5feb-kube-api-access-8blgl\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.478868 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51d744d8-84a2-4a45-8043-8bf0594dde75-logs\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580114 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580185 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580220 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580244 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ndb4\" (UniqueName: \"kubernetes.io/projected/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-kube-api-access-2ndb4\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580299 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580345 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580376 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580429 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c3399b-495f-48e0-aaea-eed2883b5feb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580452 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580497 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c3399b-495f-48e0-aaea-eed2883b5feb-config-data\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580547 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580608 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7c3399b-495f-48e0-aaea-eed2883b5feb-logs\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580646 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580699 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.580723 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8blgl\" (UniqueName: \"kubernetes.io/projected/d7c3399b-495f-48e0-aaea-eed2883b5feb-kube-api-access-8blgl\") pod \"nova-metadata-0\" (UID: 
\"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.581600 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.582036 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7c3399b-495f-48e0-aaea-eed2883b5feb-logs\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.582642 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.857309 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d744d8-84a2-4a45-8043-8bf0594dde75-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.858206 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d744d8-84a2-4a45-8043-8bf0594dde75-config-data\") pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.859068 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.859291 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7c3399b-495f-48e0-aaea-eed2883b5feb-config-data\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.859301 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.859686 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp2nl\" (UniqueName: \"kubernetes.io/projected/51d744d8-84a2-4a45-8043-8bf0594dde75-kube-api-access-qp2nl\") 
pod \"nova-api-0\" (UID: \"51d744d8-84a2-4a45-8043-8bf0594dde75\") " pod="openstack/nova-api-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.859732 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7c3399b-495f-48e0-aaea-eed2883b5feb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.860439 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.860626 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.860984 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.861887 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.862855 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.869973 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8blgl\" (UniqueName: \"kubernetes.io/projected/d7c3399b-495f-48e0-aaea-eed2883b5feb-kube-api-access-8blgl\") pod \"nova-metadata-0\" (UID: \"d7c3399b-495f-48e0-aaea-eed2883b5feb\") " pod="openstack/nova-metadata-0" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.870842 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ndb4\" (UniqueName: \"kubernetes.io/projected/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-kube-api-access-2ndb4\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:30 crc kubenswrapper[4922]: I0930 01:02:30.872171 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.019011 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.028852 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.029557 4922 scope.go:117] "RemoveContainer" containerID="99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a" Sep 30 01:02:31 crc kubenswrapper[4922]: E0930 01:02:31.030147 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a\": container with ID starting with 99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a not found: ID does not exist" containerID="99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.030195 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a"} err="failed to get container status \"99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a\": rpc error: code = NotFound desc = could not find container \"99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a\": container with ID starting with 99f5dfcf29fd19d658fb09c2524370b5e5e5b8f980740dd997b65b8a9004127a not found: ID does not exist" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.030224 4922 scope.go:117] "RemoveContainer" containerID="2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60" Sep 30 01:02:31 crc kubenswrapper[4922]: E0930 01:02:31.031594 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60\": container with ID starting with 2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60 not found: ID does not exist" containerID="2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.031631 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60"} err="failed to get container status \"2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60\": rpc error: code = NotFound desc = could not find container \"2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60\": container with ID starting with 2f01b064102d8b61ef7129c488155a07bfcc8b14193fa5dd3ae8c4c137107f60 not found: ID does not exist" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.031648 4922 scope.go:117] "RemoveContainer" 
containerID="99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.043910 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.095904 4922 scope.go:117] "RemoveContainer" containerID="eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.249377 4922 scope.go:117] "RemoveContainer" containerID="99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046" Sep 30 01:02:31 crc kubenswrapper[4922]: E0930 01:02:31.249903 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046\": container with ID starting with 99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046 not found: ID does not exist" containerID="99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.249942 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046"} err="failed to get container status \"99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046\": rpc error: code = NotFound desc = could not find container \"99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046\": container with ID starting with 99bc87d009ee42ff19ca38669002530fb1e2bf10d12d2af93333c9ba67a0c046 not found: ID does not exist" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.249967 4922 scope.go:117] "RemoveContainer" containerID="eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735" Sep 30 01:02:31 crc kubenswrapper[4922]: E0930 01:02:31.250340 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735\": container with ID starting with eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735 not found: ID does not exist" containerID="eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.250360 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735"} err="failed to get container status \"eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735\": rpc error: code = NotFound desc = could not find container \"eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735\": container with ID starting with eb07e984fc84fbb98533fee8d4f45e296e4d7e5e176e555c8c021b5fb52ad735 not found: ID does not exist" Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.557933 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.646201 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 01:02:31 crc kubenswrapper[4922]: I0930 01:02:31.758523 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv"] Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.253750 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" event={"ID":"22c6a55f-61a1-4731-b0d7-2864a91aa8ec","Type":"ContainerStarted","Data":"91723c4b05c61709fbe9cd96d3f8da96f5fa33d82185984be4159c63ffe87b2f"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.254029 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" event={"ID":"22c6a55f-61a1-4731-b0d7-2864a91aa8ec","Type":"ContainerStarted","Data":"6f95ff284a9c477b2d7e3019dd8866dc4de97c728002090bbaa0f1722527c288"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.263855 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7c3399b-495f-48e0-aaea-eed2883b5feb","Type":"ContainerStarted","Data":"85f2d5195e770eaaa2c64db09a4297ef7b96681968336359be46231b46a1b575"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.263919 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7c3399b-495f-48e0-aaea-eed2883b5feb","Type":"ContainerStarted","Data":"36e5d893e653d9ed7daede547f92d9531079b35c037f367f631adfd41033c194"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.263936 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d7c3399b-495f-48e0-aaea-eed2883b5feb","Type":"ContainerStarted","Data":"744039d8330a2a39d322069930f34b4f8b994615f86c980f3115a9042f5fd79b"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.265913 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"51d744d8-84a2-4a45-8043-8bf0594dde75","Type":"ContainerStarted","Data":"73a0b9df23629873f22894125a88b133c4db8a6e7d2127262ca2b9ce35fdccd5"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.265939 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"51d744d8-84a2-4a45-8043-8bf0594dde75","Type":"ContainerStarted","Data":"77b90be8cc16aa63d3c4f52466ff3b76bc0b5d5d68bbb01f93fa38a4bc35783d"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.265953 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"51d744d8-84a2-4a45-8043-8bf0594dde75","Type":"ContainerStarted","Data":"7a74831150a74bffe76c1af1ebfb5bf20c27420f189b234da6a48d92056c5391"} Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.277942 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" podStartSLOduration=2.0645396 podStartE2EDuration="2.277923026s" podCreationTimestamp="2025-09-30 01:02:30 +0000 UTC" firstStartedPulling="2025-09-30 01:02:31.759784415 +0000 UTC m=+9356.070073228" lastFinishedPulling="2025-09-30 01:02:31.973167841 +0000 UTC m=+9356.283456654" observedRunningTime="2025-09-30 01:02:32.274556213 +0000 UTC m=+9356.584845026" watchObservedRunningTime="2025-09-30 01:02:32.277923026 +0000 UTC m=+9356.588211839" Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.303871 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.303846947 podStartE2EDuration="2.303846947s" podCreationTimestamp="2025-09-30 01:02:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:02:32.296617238 +0000 UTC m=+9356.606906051" watchObservedRunningTime="2025-09-30 
01:02:32.303846947 +0000 UTC m=+9356.614135770" Sep 30 01:02:32 crc kubenswrapper[4922]: I0930 01:02:32.328637 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.32862148 podStartE2EDuration="2.32862148s" podCreationTimestamp="2025-09-30 01:02:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:02:32.321965485 +0000 UTC m=+9356.632254298" watchObservedRunningTime="2025-09-30 01:02:32.32862148 +0000 UTC m=+9356.638910293" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.296344 4922 generic.go:334] "Generic (PLEG): container finished" podID="fe2075ef-27e4-4e92-84d3-4178fa974985" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" exitCode=0 Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.296479 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe2075ef-27e4-4e92-84d3-4178fa974985","Type":"ContainerDied","Data":"78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a"} Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.299735 4922 generic.go:334] "Generic (PLEG): container finished" podID="ab906156-68b3-4477-aff3-05cba9fe664f" containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" exitCode=0 Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.299784 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ab906156-68b3-4477-aff3-05cba9fe664f","Type":"ContainerDied","Data":"86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d"} Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.616000 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.680997 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-config-data\") pod \"ab906156-68b3-4477-aff3-05cba9fe664f\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.681073 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfh2l\" (UniqueName: \"kubernetes.io/projected/ab906156-68b3-4477-aff3-05cba9fe664f-kube-api-access-jfh2l\") pod \"ab906156-68b3-4477-aff3-05cba9fe664f\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.681259 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-combined-ca-bundle\") pod \"ab906156-68b3-4477-aff3-05cba9fe664f\" (UID: \"ab906156-68b3-4477-aff3-05cba9fe664f\") " Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.686862 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab906156-68b3-4477-aff3-05cba9fe664f-kube-api-access-jfh2l" (OuterVolumeSpecName: "kube-api-access-jfh2l") pod "ab906156-68b3-4477-aff3-05cba9fe664f" (UID: "ab906156-68b3-4477-aff3-05cba9fe664f"). InnerVolumeSpecName "kube-api-access-jfh2l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:34 crc kubenswrapper[4922]: E0930 01:02:34.706099 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a is running failed: container process not found" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 01:02:34 crc kubenswrapper[4922]: E0930 01:02:34.706606 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a is running failed: container process not found" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 01:02:34 crc kubenswrapper[4922]: E0930 01:02:34.706941 4922 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a is running failed: container process not found" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 01:02:34 crc kubenswrapper[4922]: E0930 01:02:34.707002 4922 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="fe2075ef-27e4-4e92-84d3-4178fa974985" containerName="nova-scheduler-scheduler" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.710440 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-config-data" (OuterVolumeSpecName: "config-data") pod "ab906156-68b3-4477-aff3-05cba9fe664f" (UID: "ab906156-68b3-4477-aff3-05cba9fe664f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.719651 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.726194 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab906156-68b3-4477-aff3-05cba9fe664f" (UID: "ab906156-68b3-4477-aff3-05cba9fe664f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.783649 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-config-data\") pod \"fe2075ef-27e4-4e92-84d3-4178fa974985\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.783853 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxl6n\" (UniqueName: \"kubernetes.io/projected/fe2075ef-27e4-4e92-84d3-4178fa974985-kube-api-access-gxl6n\") pod \"fe2075ef-27e4-4e92-84d3-4178fa974985\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.783998 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-combined-ca-bundle\") pod \"fe2075ef-27e4-4e92-84d3-4178fa974985\" (UID: \"fe2075ef-27e4-4e92-84d3-4178fa974985\") " Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.784452 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.784471 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfh2l\" (UniqueName: \"kubernetes.io/projected/ab906156-68b3-4477-aff3-05cba9fe664f-kube-api-access-jfh2l\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.784481 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab906156-68b3-4477-aff3-05cba9fe664f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.787162 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe2075ef-27e4-4e92-84d3-4178fa974985-kube-api-access-gxl6n" (OuterVolumeSpecName: "kube-api-access-gxl6n") pod "fe2075ef-27e4-4e92-84d3-4178fa974985" (UID: "fe2075ef-27e4-4e92-84d3-4178fa974985"). InnerVolumeSpecName "kube-api-access-gxl6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.813514 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe2075ef-27e4-4e92-84d3-4178fa974985" (UID: "fe2075ef-27e4-4e92-84d3-4178fa974985"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.816517 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-config-data" (OuterVolumeSpecName: "config-data") pod "fe2075ef-27e4-4e92-84d3-4178fa974985" (UID: "fe2075ef-27e4-4e92-84d3-4178fa974985"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.886235 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxl6n\" (UniqueName: \"kubernetes.io/projected/fe2075ef-27e4-4e92-84d3-4178fa974985-kube-api-access-gxl6n\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.886266 4922 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:34 crc kubenswrapper[4922]: I0930 01:02:34.886275 4922 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe2075ef-27e4-4e92-84d3-4178fa974985-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.311894 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fe2075ef-27e4-4e92-84d3-4178fa974985","Type":"ContainerDied","Data":"f3e17dbf19c860b14a4c9b01af5caa767383f3474fe3516b5e00ebd1f7c4b1e5"} Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.312263 4922 scope.go:117] "RemoveContainer" containerID="78ed5f74c4c4b7984cb9009c49ad17de9790d5c4cfe91defd5837fe6c1a45f8a" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.311912 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.315002 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ab906156-68b3-4477-aff3-05cba9fe664f","Type":"ContainerDied","Data":"ddb859d95be8a49470f23aa5f5329cf01bc2a56a643ae454b7f4b76a4a8c67af"} Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.315111 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.355660 4922 scope.go:117] "RemoveContainer" containerID="86824029fccf804fc905e44c0c16ce1482c92b8b58f88f5b65cac3436da0935d" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.386280 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.407980 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.426372 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.451424 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.473380 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: E0930 01:02:35.474223 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab906156-68b3-4477-aff3-05cba9fe664f" containerName="nova-cell0-conductor-conductor" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.474243 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab906156-68b3-4477-aff3-05cba9fe664f" containerName="nova-cell0-conductor-conductor" Sep 30 01:02:35 crc kubenswrapper[4922]: E0930 01:02:35.474302 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe2075ef-27e4-4e92-84d3-4178fa974985" containerName="nova-scheduler-scheduler" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.474313 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe2075ef-27e4-4e92-84d3-4178fa974985" containerName="nova-scheduler-scheduler" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.476867 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe2075ef-27e4-4e92-84d3-4178fa974985" containerName="nova-scheduler-scheduler" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.476915 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab906156-68b3-4477-aff3-05cba9fe664f" containerName="nova-cell0-conductor-conductor" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.478793 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.484675 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.485293 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.486609 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.491573 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.500438 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.509962 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.602837 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263ec1aa-f475-4b34-950d-c93301af9645-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.603481 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263ec1aa-f475-4b34-950d-c93301af9645-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.603691 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnhkb\" (UniqueName: \"kubernetes.io/projected/263ec1aa-f475-4b34-950d-c93301af9645-kube-api-access-bnhkb\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.603734 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.603767 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8bjv\" (UniqueName: \"kubernetes.io/projected/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-kube-api-access-f8bjv\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.603830 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-config-data\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.705372 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263ec1aa-f475-4b34-950d-c93301af9645-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.705458 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnhkb\" (UniqueName: \"kubernetes.io/projected/263ec1aa-f475-4b34-950d-c93301af9645-kube-api-access-bnhkb\") pod 
\"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.705507 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.705538 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8bjv\" (UniqueName: \"kubernetes.io/projected/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-kube-api-access-f8bjv\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.705573 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-config-data\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.705644 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263ec1aa-f475-4b34-950d-c93301af9645-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.710966 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/263ec1aa-f475-4b34-950d-c93301af9645-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.711019 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/263ec1aa-f475-4b34-950d-c93301af9645-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.711620 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-config-data\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.719256 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.722953 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnhkb\" (UniqueName: \"kubernetes.io/projected/263ec1aa-f475-4b34-950d-c93301af9645-kube-api-access-bnhkb\") pod \"nova-cell0-conductor-0\" (UID: \"263ec1aa-f475-4b34-950d-c93301af9645\") " pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.722988 4922 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-f8bjv\" (UniqueName: \"kubernetes.io/projected/b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c-kube-api-access-f8bjv\") pod \"nova-scheduler-0\" (UID: \"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c\") " pod="openstack/nova-scheduler-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.823622 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:35 crc kubenswrapper[4922]: I0930 01:02:35.833133 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 01:02:36 crc kubenswrapper[4922]: I0930 01:02:36.044761 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 01:02:36 crc kubenswrapper[4922]: I0930 01:02:36.045132 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 01:02:36 crc kubenswrapper[4922]: I0930 01:02:36.308305 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 01:02:36 crc kubenswrapper[4922]: W0930 01:02:36.315377 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod263ec1aa_f475_4b34_950d_c93301af9645.slice/crio-fd75ffaf3d8fdaa02063176d3d59aeb7c4e75cb42c6adcb1949893aad7b4c018 WatchSource:0}: Error finding container fd75ffaf3d8fdaa02063176d3d59aeb7c4e75cb42c6adcb1949893aad7b4c018: Status 404 returned error can't find the container with id fd75ffaf3d8fdaa02063176d3d59aeb7c4e75cb42c6adcb1949893aad7b4c018 Sep 30 01:02:36 crc kubenswrapper[4922]: I0930 01:02:36.318316 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 01:02:36 crc kubenswrapper[4922]: I0930 01:02:36.329454 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c","Type":"ContainerStarted","Data":"61bbe9bdca6d418eaccd7416b7d5d182effa230c46f6a0c54cc56619bbec70d6"} Sep 30 01:02:36 crc kubenswrapper[4922]: I0930 01:02:36.442114 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab906156-68b3-4477-aff3-05cba9fe664f" path="/var/lib/kubelet/pods/ab906156-68b3-4477-aff3-05cba9fe664f/volumes" Sep 30 01:02:36 crc kubenswrapper[4922]: I0930 01:02:36.442909 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe2075ef-27e4-4e92-84d3-4178fa974985" path="/var/lib/kubelet/pods/fe2075ef-27e4-4e92-84d3-4178fa974985/volumes" Sep 30 01:02:37 crc kubenswrapper[4922]: I0930 01:02:37.352503 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c","Type":"ContainerStarted","Data":"f1f4bf5da192256d0f112b93e35fedec4157fa88bdc9f96e6712cdb3c9de5529"} Sep 30 01:02:37 crc kubenswrapper[4922]: I0930 01:02:37.356494 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"263ec1aa-f475-4b34-950d-c93301af9645","Type":"ContainerStarted","Data":"8fdfbecde1094f8f21fea54f0350ffb9087169389cd0d74495b92cce296eabd8"} Sep 30 01:02:37 crc kubenswrapper[4922]: I0930 01:02:37.356539 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"263ec1aa-f475-4b34-950d-c93301af9645","Type":"ContainerStarted","Data":"fd75ffaf3d8fdaa02063176d3d59aeb7c4e75cb42c6adcb1949893aad7b4c018"} Sep 30 01:02:37 crc kubenswrapper[4922]: I0930 
01:02:37.356737 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:37 crc kubenswrapper[4922]: I0930 01:02:37.380347 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.380321236 podStartE2EDuration="2.380321236s" podCreationTimestamp="2025-09-30 01:02:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:02:37.37320817 +0000 UTC m=+9361.683497023" watchObservedRunningTime="2025-09-30 01:02:37.380321236 +0000 UTC m=+9361.690610079" Sep 30 01:02:37 crc kubenswrapper[4922]: I0930 01:02:37.412912 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.412881161 podStartE2EDuration="2.412881161s" podCreationTimestamp="2025-09-30 01:02:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 01:02:37.400913665 +0000 UTC m=+9361.711202488" watchObservedRunningTime="2025-09-30 01:02:37.412881161 +0000 UTC m=+9361.723170024" Sep 30 01:02:37 crc kubenswrapper[4922]: I0930 01:02:37.869990 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 01:02:40 crc kubenswrapper[4922]: I0930 01:02:40.833300 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 01:02:41 crc kubenswrapper[4922]: I0930 01:02:41.019717 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 01:02:41 crc kubenswrapper[4922]: I0930 01:02:41.019778 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 01:02:41 crc kubenswrapper[4922]: I0930 01:02:41.045117 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 01:02:41 crc kubenswrapper[4922]: I0930 01:02:41.045170 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 01:02:42 crc kubenswrapper[4922]: I0930 01:02:42.102726 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="51d744d8-84a2-4a45-8043-8bf0594dde75" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 01:02:42 crc kubenswrapper[4922]: I0930 01:02:42.185642 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="51d744d8-84a2-4a45-8043-8bf0594dde75" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 01:02:42 crc kubenswrapper[4922]: I0930 01:02:42.185627 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d7c3399b-495f-48e0-aaea-eed2883b5feb" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 01:02:42 crc kubenswrapper[4922]: I0930 01:02:42.185977 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d7c3399b-495f-48e0-aaea-eed2883b5feb" 
containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 01:02:45 crc kubenswrapper[4922]: I0930 01:02:45.834134 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 01:02:45 crc kubenswrapper[4922]: I0930 01:02:45.867772 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 01:02:45 crc kubenswrapper[4922]: I0930 01:02:45.869265 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 01:02:46 crc kubenswrapper[4922]: I0930 01:02:46.514487 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.023844 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.024684 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.024937 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.031001 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.052053 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.052927 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.054334 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.513253 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.517334 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 01:02:51 crc kubenswrapper[4922]: I0930 01:02:51.521953 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 01:02:58 crc kubenswrapper[4922]: I0930 01:02:58.913238 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:02:58 crc kubenswrapper[4922]: I0930 01:02:58.913841 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:02:58 crc kubenswrapper[4922]: I0930 01:02:58.913900 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 01:02:58 crc kubenswrapper[4922]: I0930 01:02:58.914987 4922 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6686e73c3e2aba1d57d05c28c5f8c044d54fdff0633d242879d8a8c88e13fb85"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:02:58 crc kubenswrapper[4922]: I0930 01:02:58.915080 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://6686e73c3e2aba1d57d05c28c5f8c044d54fdff0633d242879d8a8c88e13fb85" gracePeriod=600 Sep 30 01:02:59 crc kubenswrapper[4922]: I0930 01:02:59.621034 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="6686e73c3e2aba1d57d05c28c5f8c044d54fdff0633d242879d8a8c88e13fb85" exitCode=0 Sep 30 01:02:59 crc kubenswrapper[4922]: I0930 01:02:59.621190 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"6686e73c3e2aba1d57d05c28c5f8c044d54fdff0633d242879d8a8c88e13fb85"} Sep 30 01:02:59 crc kubenswrapper[4922]: I0930 01:02:59.621633 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3"} Sep 30 01:02:59 crc kubenswrapper[4922]: I0930 01:02:59.621655 4922 scope.go:117] "RemoveContainer" containerID="2930186984eb5080956e02f697907c611a6652f6965e8503769c93009fd53300" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.603119 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h6t84"] Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.608523 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.618633 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h6t84"] Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.733743 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-catalog-content\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.733912 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cncd9\" (UniqueName: \"kubernetes.io/projected/3bdd7cb7-708c-450e-869b-66aaae27210f-kube-api-access-cncd9\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.734086 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-utilities\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.836589 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-utilities\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.837167 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-catalog-content\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.837276 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-utilities\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.837479 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cncd9\" (UniqueName: \"kubernetes.io/projected/3bdd7cb7-708c-450e-869b-66aaae27210f-kube-api-access-cncd9\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.837837 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-catalog-content\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.869534 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-cncd9\" (UniqueName: \"kubernetes.io/projected/3bdd7cb7-708c-450e-869b-66aaae27210f-kube-api-access-cncd9\") pod \"redhat-marketplace-h6t84\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:07 crc kubenswrapper[4922]: I0930 01:05:07.952530 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:08 crc kubenswrapper[4922]: I0930 01:05:08.447287 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h6t84"] Sep 30 01:05:09 crc kubenswrapper[4922]: I0930 01:05:09.227345 4922 generic.go:334] "Generic (PLEG): container finished" podID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerID="51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477" exitCode=0 Sep 30 01:05:09 crc kubenswrapper[4922]: I0930 01:05:09.227567 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h6t84" event={"ID":"3bdd7cb7-708c-450e-869b-66aaae27210f","Type":"ContainerDied","Data":"51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477"} Sep 30 01:05:09 crc kubenswrapper[4922]: I0930 01:05:09.228477 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h6t84" event={"ID":"3bdd7cb7-708c-450e-869b-66aaae27210f","Type":"ContainerStarted","Data":"9bd3a5bd44a64756b791c0f5c7e1838a4c3f141a470b50abec8a65f6cc8edc8b"} Sep 30 01:05:09 crc kubenswrapper[4922]: I0930 01:05:09.233579 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:05:10 crc kubenswrapper[4922]: I0930 01:05:10.246740 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h6t84" event={"ID":"3bdd7cb7-708c-450e-869b-66aaae27210f","Type":"ContainerStarted","Data":"eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c"} Sep 30 01:05:11 crc kubenswrapper[4922]: I0930 01:05:11.274387 4922 generic.go:334] "Generic (PLEG): container finished" podID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerID="eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c" exitCode=0 Sep 30 01:05:11 crc kubenswrapper[4922]: I0930 01:05:11.274954 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h6t84" event={"ID":"3bdd7cb7-708c-450e-869b-66aaae27210f","Type":"ContainerDied","Data":"eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c"} Sep 30 01:05:12 crc kubenswrapper[4922]: I0930 01:05:12.284759 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h6t84" event={"ID":"3bdd7cb7-708c-450e-869b-66aaae27210f","Type":"ContainerStarted","Data":"d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a"} Sep 30 01:05:12 crc kubenswrapper[4922]: I0930 01:05:12.310267 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h6t84" podStartSLOduration=2.8305080460000003 podStartE2EDuration="5.310249039s" podCreationTimestamp="2025-09-30 01:05:07 +0000 UTC" firstStartedPulling="2025-09-30 01:05:09.233108256 +0000 UTC m=+9513.543397109" lastFinishedPulling="2025-09-30 01:05:11.712849249 +0000 UTC m=+9516.023138102" observedRunningTime="2025-09-30 01:05:12.304075327 +0000 UTC m=+9516.614364140" watchObservedRunningTime="2025-09-30 01:05:12.310249039 +0000 UTC 
m=+9516.620537852" Sep 30 01:05:17 crc kubenswrapper[4922]: I0930 01:05:17.952921 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:17 crc kubenswrapper[4922]: I0930 01:05:17.953885 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:18 crc kubenswrapper[4922]: I0930 01:05:18.032195 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:18 crc kubenswrapper[4922]: I0930 01:05:18.444483 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:18 crc kubenswrapper[4922]: I0930 01:05:18.514244 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h6t84"] Sep 30 01:05:20 crc kubenswrapper[4922]: I0930 01:05:20.402153 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h6t84" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="registry-server" containerID="cri-o://d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a" gracePeriod=2 Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.231423 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.368759 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-utilities\") pod \"3bdd7cb7-708c-450e-869b-66aaae27210f\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.368845 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cncd9\" (UniqueName: \"kubernetes.io/projected/3bdd7cb7-708c-450e-869b-66aaae27210f-kube-api-access-cncd9\") pod \"3bdd7cb7-708c-450e-869b-66aaae27210f\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.368994 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-catalog-content\") pod \"3bdd7cb7-708c-450e-869b-66aaae27210f\" (UID: \"3bdd7cb7-708c-450e-869b-66aaae27210f\") " Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.369900 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-utilities" (OuterVolumeSpecName: "utilities") pod "3bdd7cb7-708c-450e-869b-66aaae27210f" (UID: "3bdd7cb7-708c-450e-869b-66aaae27210f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.375824 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bdd7cb7-708c-450e-869b-66aaae27210f-kube-api-access-cncd9" (OuterVolumeSpecName: "kube-api-access-cncd9") pod "3bdd7cb7-708c-450e-869b-66aaae27210f" (UID: "3bdd7cb7-708c-450e-869b-66aaae27210f"). InnerVolumeSpecName "kube-api-access-cncd9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.381953 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3bdd7cb7-708c-450e-869b-66aaae27210f" (UID: "3bdd7cb7-708c-450e-869b-66aaae27210f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.419166 4922 generic.go:334] "Generic (PLEG): container finished" podID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerID="d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a" exitCode=0 Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.419216 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h6t84" event={"ID":"3bdd7cb7-708c-450e-869b-66aaae27210f","Type":"ContainerDied","Data":"d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a"} Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.419246 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h6t84" event={"ID":"3bdd7cb7-708c-450e-869b-66aaae27210f","Type":"ContainerDied","Data":"9bd3a5bd44a64756b791c0f5c7e1838a4c3f141a470b50abec8a65f6cc8edc8b"} Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.419265 4922 scope.go:117] "RemoveContainer" containerID="d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.419550 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h6t84" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.451821 4922 scope.go:117] "RemoveContainer" containerID="eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.471795 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.471830 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bdd7cb7-708c-450e-869b-66aaae27210f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.471845 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cncd9\" (UniqueName: \"kubernetes.io/projected/3bdd7cb7-708c-450e-869b-66aaae27210f-kube-api-access-cncd9\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.482624 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h6t84"] Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.497435 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h6t84"] Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.507371 4922 scope.go:117] "RemoveContainer" containerID="51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.531914 4922 scope.go:117] "RemoveContainer" containerID="d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a" Sep 30 01:05:21 crc kubenswrapper[4922]: E0930 01:05:21.532290 4922 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a\": container with ID starting with d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a not found: ID does not exist" containerID="d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.532317 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a"} err="failed to get container status \"d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a\": rpc error: code = NotFound desc = could not find container \"d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a\": container with ID starting with d8353072b03984d54cc01bca75c355d755c600b90f9287be39e1959f16fff68a not found: ID does not exist" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.532335 4922 scope.go:117] "RemoveContainer" containerID="eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c" Sep 30 01:05:21 crc kubenswrapper[4922]: E0930 01:05:21.532844 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c\": container with ID starting with eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c not found: ID does not exist" containerID="eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.532864 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c"} err="failed to get container status \"eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c\": rpc error: code = NotFound desc = could not find container \"eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c\": container with ID starting with eab9d2da2d3bb0502630908d693494d9024e2e771688e7bef3d57ee45fe5ee8c not found: ID does not exist" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.532878 4922 scope.go:117] "RemoveContainer" containerID="51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477" Sep 30 01:05:21 crc kubenswrapper[4922]: E0930 01:05:21.533181 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477\": container with ID starting with 51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477 not found: ID does not exist" containerID="51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477" Sep 30 01:05:21 crc kubenswrapper[4922]: I0930 01:05:21.533245 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477"} err="failed to get container status \"51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477\": rpc error: code = NotFound desc = could not find container \"51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477\": container with ID starting with 51bb8df97746e4cb7b9f4f42dde179268ae76496efa87feb05cae79514b26477 not found: ID does not exist" Sep 30 01:05:22 crc kubenswrapper[4922]: I0930 01:05:22.443686 4922 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" path="/var/lib/kubelet/pods/3bdd7cb7-708c-450e-869b-66aaae27210f/volumes" Sep 30 01:05:28 crc kubenswrapper[4922]: I0930 01:05:28.912540 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:05:28 crc kubenswrapper[4922]: I0930 01:05:28.913219 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:05:45 crc kubenswrapper[4922]: I0930 01:05:45.857899 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9fmgm"] Sep 30 01:05:45 crc kubenswrapper[4922]: E0930 01:05:45.859314 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="registry-server" Sep 30 01:05:45 crc kubenswrapper[4922]: I0930 01:05:45.859338 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="registry-server" Sep 30 01:05:45 crc kubenswrapper[4922]: E0930 01:05:45.859431 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="extract-utilities" Sep 30 01:05:45 crc kubenswrapper[4922]: I0930 01:05:45.859445 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="extract-utilities" Sep 30 01:05:45 crc kubenswrapper[4922]: E0930 01:05:45.859465 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="extract-content" Sep 30 01:05:45 crc kubenswrapper[4922]: I0930 01:05:45.859505 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="extract-content" Sep 30 01:05:45 crc kubenswrapper[4922]: I0930 01:05:45.859916 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bdd7cb7-708c-450e-869b-66aaae27210f" containerName="registry-server" Sep 30 01:05:45 crc kubenswrapper[4922]: I0930 01:05:45.862920 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:45 crc kubenswrapper[4922]: I0930 01:05:45.873677 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9fmgm"] Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.005095 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-catalog-content\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.005154 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-utilities\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.005210 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s294d\" (UniqueName: \"kubernetes.io/projected/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-kube-api-access-s294d\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.107451 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-catalog-content\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.107544 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-utilities\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.107925 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-catalog-content\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.107975 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-utilities\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.107588 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s294d\" (UniqueName: \"kubernetes.io/projected/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-kube-api-access-s294d\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.135728 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s294d\" (UniqueName: \"kubernetes.io/projected/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-kube-api-access-s294d\") pod \"community-operators-9fmgm\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.210089 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.679238 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9fmgm"] Sep 30 01:05:46 crc kubenswrapper[4922]: W0930 01:05:46.688729 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0b2ca37_d678_4a57_8d27_05a2e3cd785d.slice/crio-926e4295f28048064566b0830ca97ade9a109aee3c883f456eadb2cd51e1924b WatchSource:0}: Error finding container 926e4295f28048064566b0830ca97ade9a109aee3c883f456eadb2cd51e1924b: Status 404 returned error can't find the container with id 926e4295f28048064566b0830ca97ade9a109aee3c883f456eadb2cd51e1924b Sep 30 01:05:46 crc kubenswrapper[4922]: I0930 01:05:46.808225 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmgm" event={"ID":"c0b2ca37-d678-4a57-8d27-05a2e3cd785d","Type":"ContainerStarted","Data":"926e4295f28048064566b0830ca97ade9a109aee3c883f456eadb2cd51e1924b"} Sep 30 01:05:47 crc kubenswrapper[4922]: I0930 01:05:47.822968 4922 generic.go:334] "Generic (PLEG): container finished" podID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerID="aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b" exitCode=0 Sep 30 01:05:47 crc kubenswrapper[4922]: I0930 01:05:47.823058 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmgm" event={"ID":"c0b2ca37-d678-4a57-8d27-05a2e3cd785d","Type":"ContainerDied","Data":"aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b"} Sep 30 01:05:49 crc kubenswrapper[4922]: I0930 01:05:49.852524 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmgm" event={"ID":"c0b2ca37-d678-4a57-8d27-05a2e3cd785d","Type":"ContainerStarted","Data":"0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49"} Sep 30 01:05:50 crc kubenswrapper[4922]: I0930 01:05:50.870248 4922 generic.go:334] "Generic (PLEG): container finished" podID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerID="0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49" exitCode=0 Sep 30 01:05:50 crc kubenswrapper[4922]: I0930 01:05:50.870320 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmgm" event={"ID":"c0b2ca37-d678-4a57-8d27-05a2e3cd785d","Type":"ContainerDied","Data":"0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49"} Sep 30 01:05:51 crc kubenswrapper[4922]: I0930 01:05:51.886332 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmgm" event={"ID":"c0b2ca37-d678-4a57-8d27-05a2e3cd785d","Type":"ContainerStarted","Data":"66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310"} Sep 30 01:05:51 crc kubenswrapper[4922]: I0930 01:05:51.942085 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9fmgm" 
podStartSLOduration=3.511815317 podStartE2EDuration="6.942065493s" podCreationTimestamp="2025-09-30 01:05:45 +0000 UTC" firstStartedPulling="2025-09-30 01:05:47.827529318 +0000 UTC m=+9552.137818171" lastFinishedPulling="2025-09-30 01:05:51.257779524 +0000 UTC m=+9555.568068347" observedRunningTime="2025-09-30 01:05:51.935534351 +0000 UTC m=+9556.245823204" watchObservedRunningTime="2025-09-30 01:05:51.942065493 +0000 UTC m=+9556.252354316" Sep 30 01:05:56 crc kubenswrapper[4922]: I0930 01:05:56.210623 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:56 crc kubenswrapper[4922]: I0930 01:05:56.212628 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:56 crc kubenswrapper[4922]: I0930 01:05:56.300356 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:57 crc kubenswrapper[4922]: I0930 01:05:57.045843 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:57 crc kubenswrapper[4922]: I0930 01:05:57.112552 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9fmgm"] Sep 30 01:05:58 crc kubenswrapper[4922]: I0930 01:05:58.912732 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:05:58 crc kubenswrapper[4922]: I0930 01:05:58.913158 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:05:58 crc kubenswrapper[4922]: I0930 01:05:58.978863 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9fmgm" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="registry-server" containerID="cri-o://66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310" gracePeriod=2 Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.622897 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.681924 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-catalog-content\") pod \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.682622 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s294d\" (UniqueName: \"kubernetes.io/projected/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-kube-api-access-s294d\") pod \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.682885 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-utilities\") pod \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\" (UID: \"c0b2ca37-d678-4a57-8d27-05a2e3cd785d\") " Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.684442 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-utilities" (OuterVolumeSpecName: "utilities") pod "c0b2ca37-d678-4a57-8d27-05a2e3cd785d" (UID: "c0b2ca37-d678-4a57-8d27-05a2e3cd785d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.690687 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-kube-api-access-s294d" (OuterVolumeSpecName: "kube-api-access-s294d") pod "c0b2ca37-d678-4a57-8d27-05a2e3cd785d" (UID: "c0b2ca37-d678-4a57-8d27-05a2e3cd785d"). InnerVolumeSpecName "kube-api-access-s294d". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.746546 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0b2ca37-d678-4a57-8d27-05a2e3cd785d" (UID: "c0b2ca37-d678-4a57-8d27-05a2e3cd785d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.785832 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.785872 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.785886 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s294d\" (UniqueName: \"kubernetes.io/projected/c0b2ca37-d678-4a57-8d27-05a2e3cd785d-kube-api-access-s294d\") on node \"crc\" DevicePath \"\"" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.995745 4922 generic.go:334] "Generic (PLEG): container finished" podID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerID="66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310" exitCode=0 Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.995853 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmgm" event={"ID":"c0b2ca37-d678-4a57-8d27-05a2e3cd785d","Type":"ContainerDied","Data":"66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310"} Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.995896 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fmgm" event={"ID":"c0b2ca37-d678-4a57-8d27-05a2e3cd785d","Type":"ContainerDied","Data":"926e4295f28048064566b0830ca97ade9a109aee3c883f456eadb2cd51e1924b"} Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.995929 4922 scope.go:117] "RemoveContainer" containerID="66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310" Sep 30 01:05:59 crc kubenswrapper[4922]: I0930 01:05:59.996142 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9fmgm" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.028541 4922 scope.go:117] "RemoveContainer" containerID="0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.069670 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9fmgm"] Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.083043 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9fmgm"] Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.089528 4922 scope.go:117] "RemoveContainer" containerID="aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.125977 4922 scope.go:117] "RemoveContainer" containerID="66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310" Sep 30 01:06:00 crc kubenswrapper[4922]: E0930 01:06:00.126527 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310\": container with ID starting with 66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310 not found: ID does not exist" containerID="66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.126577 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310"} err="failed to get container status \"66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310\": rpc error: code = NotFound desc = could not find container \"66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310\": container with ID starting with 66bd6e65d5277c8618c741fd9aa38e1ee2a78d9cca0640450d34497412444310 not found: ID does not exist" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.126611 4922 scope.go:117] "RemoveContainer" containerID="0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49" Sep 30 01:06:00 crc kubenswrapper[4922]: E0930 01:06:00.127678 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49\": container with ID starting with 0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49 not found: ID does not exist" containerID="0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.127714 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49"} err="failed to get container status \"0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49\": rpc error: code = NotFound desc = could not find container \"0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49\": container with ID starting with 0bed8b6c8116f1c6d308733fc7038d0b4dd1a2f3751e591ac05520fc82df9b49 not found: ID does not exist" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.127770 4922 scope.go:117] "RemoveContainer" containerID="aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b" Sep 30 01:06:00 crc kubenswrapper[4922]: E0930 01:06:00.128241 4922 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b\": container with ID starting with aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b not found: ID does not exist" containerID="aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.128267 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b"} err="failed to get container status \"aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b\": rpc error: code = NotFound desc = could not find container \"aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b\": container with ID starting with aa11e0703ecfd409c7051a83a92803fecb325779f1e4d1b138a971f7ace7662b not found: ID does not exist" Sep 30 01:06:00 crc kubenswrapper[4922]: I0930 01:06:00.436025 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" path="/var/lib/kubelet/pods/c0b2ca37-d678-4a57-8d27-05a2e3cd785d/volumes" Sep 30 01:06:09 crc kubenswrapper[4922]: I0930 01:06:09.926023 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q4l95"] Sep 30 01:06:09 crc kubenswrapper[4922]: E0930 01:06:09.927219 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="extract-utilities" Sep 30 01:06:09 crc kubenswrapper[4922]: I0930 01:06:09.927241 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="extract-utilities" Sep 30 01:06:09 crc kubenswrapper[4922]: E0930 01:06:09.927279 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="extract-content" Sep 30 01:06:09 crc kubenswrapper[4922]: I0930 01:06:09.927291 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="extract-content" Sep 30 01:06:09 crc kubenswrapper[4922]: E0930 01:06:09.927307 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="registry-server" Sep 30 01:06:09 crc kubenswrapper[4922]: I0930 01:06:09.927319 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="registry-server" Sep 30 01:06:09 crc kubenswrapper[4922]: I0930 01:06:09.927779 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0b2ca37-d678-4a57-8d27-05a2e3cd785d" containerName="registry-server" Sep 30 01:06:09 crc kubenswrapper[4922]: I0930 01:06:09.930532 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:09 crc kubenswrapper[4922]: I0930 01:06:09.944494 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q4l95"] Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.029053 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-utilities\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.029526 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzggf\" (UniqueName: \"kubernetes.io/projected/2feded8b-863d-401e-9bca-a44c8db0b66b-kube-api-access-hzggf\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.029721 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-catalog-content\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.132454 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzggf\" (UniqueName: \"kubernetes.io/projected/2feded8b-863d-401e-9bca-a44c8db0b66b-kube-api-access-hzggf\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.132598 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-catalog-content\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.132769 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-utilities\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.133713 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-utilities\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.133785 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-catalog-content\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.159217 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hzggf\" (UniqueName: \"kubernetes.io/projected/2feded8b-863d-401e-9bca-a44c8db0b66b-kube-api-access-hzggf\") pod \"certified-operators-q4l95\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.266173 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:10 crc kubenswrapper[4922]: I0930 01:06:10.790946 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q4l95"] Sep 30 01:06:11 crc kubenswrapper[4922]: I0930 01:06:11.157029 4922 generic.go:334] "Generic (PLEG): container finished" podID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerID="da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3" exitCode=0 Sep 30 01:06:11 crc kubenswrapper[4922]: I0930 01:06:11.157101 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4l95" event={"ID":"2feded8b-863d-401e-9bca-a44c8db0b66b","Type":"ContainerDied","Data":"da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3"} Sep 30 01:06:11 crc kubenswrapper[4922]: I0930 01:06:11.157624 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4l95" event={"ID":"2feded8b-863d-401e-9bca-a44c8db0b66b","Type":"ContainerStarted","Data":"70d1abedecdcf4fa2d4da3897da7ba9862eea4925e7454b9b35af73803490c46"} Sep 30 01:06:12 crc kubenswrapper[4922]: I0930 01:06:12.175034 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4l95" event={"ID":"2feded8b-863d-401e-9bca-a44c8db0b66b","Type":"ContainerStarted","Data":"8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227"} Sep 30 01:06:13 crc kubenswrapper[4922]: I0930 01:06:13.186485 4922 generic.go:334] "Generic (PLEG): container finished" podID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerID="8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227" exitCode=0 Sep 30 01:06:13 crc kubenswrapper[4922]: I0930 01:06:13.186656 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4l95" event={"ID":"2feded8b-863d-401e-9bca-a44c8db0b66b","Type":"ContainerDied","Data":"8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227"} Sep 30 01:06:14 crc kubenswrapper[4922]: I0930 01:06:14.205134 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4l95" event={"ID":"2feded8b-863d-401e-9bca-a44c8db0b66b","Type":"ContainerStarted","Data":"76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5"} Sep 30 01:06:14 crc kubenswrapper[4922]: I0930 01:06:14.227789 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q4l95" podStartSLOduration=2.558257595 podStartE2EDuration="5.22777023s" podCreationTimestamp="2025-09-30 01:06:09 +0000 UTC" firstStartedPulling="2025-09-30 01:06:11.159246589 +0000 UTC m=+9575.469535442" lastFinishedPulling="2025-09-30 01:06:13.828759224 +0000 UTC m=+9578.139048077" observedRunningTime="2025-09-30 01:06:14.223275448 +0000 UTC m=+9578.533564271" watchObservedRunningTime="2025-09-30 01:06:14.22777023 +0000 UTC m=+9578.538059033" Sep 30 01:06:20 crc kubenswrapper[4922]: I0930 01:06:20.266498 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:20 crc kubenswrapper[4922]: I0930 01:06:20.267088 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:20 crc kubenswrapper[4922]: I0930 01:06:20.355574 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:21 crc kubenswrapper[4922]: I0930 01:06:21.354293 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:21 crc kubenswrapper[4922]: I0930 01:06:21.426331 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q4l95"] Sep 30 01:06:23 crc kubenswrapper[4922]: I0930 01:06:23.315087 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q4l95" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="registry-server" containerID="cri-o://76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5" gracePeriod=2 Sep 30 01:06:23 crc kubenswrapper[4922]: I0930 01:06:23.945944 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.079971 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzggf\" (UniqueName: \"kubernetes.io/projected/2feded8b-863d-401e-9bca-a44c8db0b66b-kube-api-access-hzggf\") pod \"2feded8b-863d-401e-9bca-a44c8db0b66b\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.080206 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-catalog-content\") pod \"2feded8b-863d-401e-9bca-a44c8db0b66b\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.080336 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-utilities\") pod \"2feded8b-863d-401e-9bca-a44c8db0b66b\" (UID: \"2feded8b-863d-401e-9bca-a44c8db0b66b\") " Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.081633 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-utilities" (OuterVolumeSpecName: "utilities") pod "2feded8b-863d-401e-9bca-a44c8db0b66b" (UID: "2feded8b-863d-401e-9bca-a44c8db0b66b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.090688 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2feded8b-863d-401e-9bca-a44c8db0b66b-kube-api-access-hzggf" (OuterVolumeSpecName: "kube-api-access-hzggf") pod "2feded8b-863d-401e-9bca-a44c8db0b66b" (UID: "2feded8b-863d-401e-9bca-a44c8db0b66b"). InnerVolumeSpecName "kube-api-access-hzggf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.153191 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2feded8b-863d-401e-9bca-a44c8db0b66b" (UID: "2feded8b-863d-401e-9bca-a44c8db0b66b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.183520 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.183570 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzggf\" (UniqueName: \"kubernetes.io/projected/2feded8b-863d-401e-9bca-a44c8db0b66b-kube-api-access-hzggf\") on node \"crc\" DevicePath \"\"" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.183595 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2feded8b-863d-401e-9bca-a44c8db0b66b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.329666 4922 generic.go:334] "Generic (PLEG): container finished" podID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerID="76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5" exitCode=0 Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.329713 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4l95" event={"ID":"2feded8b-863d-401e-9bca-a44c8db0b66b","Type":"ContainerDied","Data":"76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5"} Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.329748 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q4l95" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.329790 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4l95" event={"ID":"2feded8b-863d-401e-9bca-a44c8db0b66b","Type":"ContainerDied","Data":"70d1abedecdcf4fa2d4da3897da7ba9862eea4925e7454b9b35af73803490c46"} Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.329818 4922 scope.go:117] "RemoveContainer" containerID="76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.386436 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q4l95"] Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.390057 4922 scope.go:117] "RemoveContainer" containerID="8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.398998 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q4l95"] Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.415609 4922 scope.go:117] "RemoveContainer" containerID="da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.436923 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" path="/var/lib/kubelet/pods/2feded8b-863d-401e-9bca-a44c8db0b66b/volumes" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.469614 4922 scope.go:117] "RemoveContainer" containerID="76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5" Sep 30 01:06:24 crc kubenswrapper[4922]: E0930 01:06:24.470078 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5\": container with ID starting with 76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5 not found: ID does not exist" containerID="76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.470115 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5"} err="failed to get container status \"76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5\": rpc error: code = NotFound desc = could not find container \"76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5\": container with ID starting with 76715c4ae1c143e42effc211abc8312a8620bac96db33ab5b1a00b09a7c2b7a5 not found: ID does not exist" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.470141 4922 scope.go:117] "RemoveContainer" containerID="8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227" Sep 30 01:06:24 crc kubenswrapper[4922]: E0930 01:06:24.470912 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227\": container with ID starting with 8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227 not found: ID does not exist" containerID="8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.470940 4922 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227"} err="failed to get container status \"8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227\": rpc error: code = NotFound desc = could not find container \"8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227\": container with ID starting with 8c1a6f7b95863b9ccd7f9d5d7f27ce7e70a8aa82c119097bf7d20833b2172227 not found: ID does not exist" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.470956 4922 scope.go:117] "RemoveContainer" containerID="da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3" Sep 30 01:06:24 crc kubenswrapper[4922]: E0930 01:06:24.471372 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3\": container with ID starting with da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3 not found: ID does not exist" containerID="da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3" Sep 30 01:06:24 crc kubenswrapper[4922]: I0930 01:06:24.471454 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3"} err="failed to get container status \"da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3\": rpc error: code = NotFound desc = could not find container \"da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3\": container with ID starting with da32a754f98ff16fcb3881642d6e831fc455ca30c5dd5a394a8da1f42e1689b3 not found: ID does not exist" Sep 30 01:06:28 crc kubenswrapper[4922]: I0930 01:06:28.912984 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:06:28 crc kubenswrapper[4922]: I0930 01:06:28.913776 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:06:28 crc kubenswrapper[4922]: I0930 01:06:28.913845 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 01:06:28 crc kubenswrapper[4922]: I0930 01:06:28.915054 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:06:28 crc kubenswrapper[4922]: I0930 01:06:28.915163 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" gracePeriod=600 Sep 30 01:06:29 crc kubenswrapper[4922]: E0930 01:06:29.040752 4922 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:06:29 crc kubenswrapper[4922]: I0930 01:06:29.401333 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" exitCode=0 Sep 30 01:06:29 crc kubenswrapper[4922]: I0930 01:06:29.401446 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3"} Sep 30 01:06:29 crc kubenswrapper[4922]: I0930 01:06:29.401727 4922 scope.go:117] "RemoveContainer" containerID="6686e73c3e2aba1d57d05c28c5f8c044d54fdff0633d242879d8a8c88e13fb85" Sep 30 01:06:29 crc kubenswrapper[4922]: I0930 01:06:29.403769 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:06:29 crc kubenswrapper[4922]: E0930 01:06:29.405102 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:06:41 crc kubenswrapper[4922]: I0930 01:06:41.422853 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:06:41 crc kubenswrapper[4922]: E0930 01:06:41.425558 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:06:55 crc kubenswrapper[4922]: I0930 01:06:55.422045 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:06:55 crc kubenswrapper[4922]: E0930 01:06:55.423103 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:07:06 crc kubenswrapper[4922]: I0930 01:07:06.438089 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:07:06 crc kubenswrapper[4922]: E0930 01:07:06.439810 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:07:22 crc kubenswrapper[4922]: I0930 01:07:22.431517 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:07:22 crc kubenswrapper[4922]: E0930 01:07:22.432413 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:07:33 crc kubenswrapper[4922]: I0930 01:07:33.422062 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:07:33 crc kubenswrapper[4922]: E0930 01:07:33.423014 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:07:46 crc kubenswrapper[4922]: I0930 01:07:46.438088 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:07:46 crc kubenswrapper[4922]: E0930 01:07:46.440037 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:08:01 crc kubenswrapper[4922]: I0930 01:08:01.422556 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:08:01 crc kubenswrapper[4922]: E0930 01:08:01.423307 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:08:16 crc kubenswrapper[4922]: I0930 01:08:16.431370 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:08:16 crc kubenswrapper[4922]: E0930 01:08:16.432110 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:08:31 crc kubenswrapper[4922]: I0930 01:08:31.422140 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:08:31 crc kubenswrapper[4922]: E0930 01:08:31.423826 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:08:46 crc kubenswrapper[4922]: I0930 01:08:46.443962 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:08:46 crc kubenswrapper[4922]: E0930 01:08:46.445361 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.128659 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w8xg8"] Sep 30 01:08:49 crc kubenswrapper[4922]: E0930 01:08:49.130239 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="registry-server" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.130263 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="registry-server" Sep 30 01:08:49 crc kubenswrapper[4922]: E0930 01:08:49.130324 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="extract-content" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.130335 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="extract-content" Sep 30 01:08:49 crc kubenswrapper[4922]: E0930 01:08:49.130352 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="extract-utilities" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.130365 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="extract-utilities" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.130769 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="2feded8b-863d-401e-9bca-a44c8db0b66b" containerName="registry-server" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.133455 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.153048 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w8xg8"] Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.337897 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-catalog-content\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.338014 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sp59\" (UniqueName: \"kubernetes.io/projected/97d4ca23-529a-4ced-8ea7-a71012323b24-kube-api-access-9sp59\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.338104 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-utilities\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.440380 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sp59\" (UniqueName: \"kubernetes.io/projected/97d4ca23-529a-4ced-8ea7-a71012323b24-kube-api-access-9sp59\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.440581 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-utilities\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.440775 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-catalog-content\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.441260 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-utilities\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.441342 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-catalog-content\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.464288 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9sp59\" (UniqueName: \"kubernetes.io/projected/97d4ca23-529a-4ced-8ea7-a71012323b24-kube-api-access-9sp59\") pod \"redhat-operators-w8xg8\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:49 crc kubenswrapper[4922]: I0930 01:08:49.547883 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:50 crc kubenswrapper[4922]: I0930 01:08:50.007740 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w8xg8"] Sep 30 01:08:50 crc kubenswrapper[4922]: I0930 01:08:50.271335 4922 generic.go:334] "Generic (PLEG): container finished" podID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerID="b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2" exitCode=0 Sep 30 01:08:50 crc kubenswrapper[4922]: I0930 01:08:50.271423 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8xg8" event={"ID":"97d4ca23-529a-4ced-8ea7-a71012323b24","Type":"ContainerDied","Data":"b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2"} Sep 30 01:08:50 crc kubenswrapper[4922]: I0930 01:08:50.271621 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8xg8" event={"ID":"97d4ca23-529a-4ced-8ea7-a71012323b24","Type":"ContainerStarted","Data":"107dea1eb068a1ac47a3613069d6a07c78535cd231cef7271d7b77c79488df95"} Sep 30 01:08:52 crc kubenswrapper[4922]: I0930 01:08:52.295256 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8xg8" event={"ID":"97d4ca23-529a-4ced-8ea7-a71012323b24","Type":"ContainerStarted","Data":"dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8"} Sep 30 01:08:54 crc kubenswrapper[4922]: I0930 01:08:54.316940 4922 generic.go:334] "Generic (PLEG): container finished" podID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerID="dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8" exitCode=0 Sep 30 01:08:54 crc kubenswrapper[4922]: I0930 01:08:54.317019 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8xg8" event={"ID":"97d4ca23-529a-4ced-8ea7-a71012323b24","Type":"ContainerDied","Data":"dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8"} Sep 30 01:08:55 crc kubenswrapper[4922]: I0930 01:08:55.326855 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8xg8" event={"ID":"97d4ca23-529a-4ced-8ea7-a71012323b24","Type":"ContainerStarted","Data":"ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db"} Sep 30 01:08:55 crc kubenswrapper[4922]: I0930 01:08:55.358159 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w8xg8" podStartSLOduration=1.8451792569999999 podStartE2EDuration="6.358138674s" podCreationTimestamp="2025-09-30 01:08:49 +0000 UTC" firstStartedPulling="2025-09-30 01:08:50.273358239 +0000 UTC m=+9734.583647052" lastFinishedPulling="2025-09-30 01:08:54.786317646 +0000 UTC m=+9739.096606469" observedRunningTime="2025-09-30 01:08:55.35066869 +0000 UTC m=+9739.660957503" watchObservedRunningTime="2025-09-30 01:08:55.358138674 +0000 UTC m=+9739.668427507" Sep 30 01:08:59 crc kubenswrapper[4922]: I0930 01:08:59.548753 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:08:59 crc kubenswrapper[4922]: I0930 01:08:59.549516 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:09:00 crc kubenswrapper[4922]: I0930 01:09:00.639681 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-w8xg8" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="registry-server" probeResult="failure" output=< Sep 30 01:09:00 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 01:09:00 crc kubenswrapper[4922]: > Sep 30 01:09:01 crc kubenswrapper[4922]: I0930 01:09:01.436244 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:09:01 crc kubenswrapper[4922]: E0930 01:09:01.438359 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:09:09 crc kubenswrapper[4922]: I0930 01:09:09.641008 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:09:09 crc kubenswrapper[4922]: I0930 01:09:09.703153 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:09:09 crc kubenswrapper[4922]: I0930 01:09:09.901924 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w8xg8"] Sep 30 01:09:11 crc kubenswrapper[4922]: I0930 01:09:11.542336 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w8xg8" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="registry-server" containerID="cri-o://ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db" gracePeriod=2 Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.163605 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.244179 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-catalog-content\") pod \"97d4ca23-529a-4ced-8ea7-a71012323b24\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.244353 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sp59\" (UniqueName: \"kubernetes.io/projected/97d4ca23-529a-4ced-8ea7-a71012323b24-kube-api-access-9sp59\") pod \"97d4ca23-529a-4ced-8ea7-a71012323b24\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.244432 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-utilities\") pod \"97d4ca23-529a-4ced-8ea7-a71012323b24\" (UID: \"97d4ca23-529a-4ced-8ea7-a71012323b24\") " Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.245141 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-utilities" (OuterVolumeSpecName: "utilities") pod "97d4ca23-529a-4ced-8ea7-a71012323b24" (UID: "97d4ca23-529a-4ced-8ea7-a71012323b24"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.245641 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.326194 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97d4ca23-529a-4ced-8ea7-a71012323b24" (UID: "97d4ca23-529a-4ced-8ea7-a71012323b24"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.348287 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97d4ca23-529a-4ced-8ea7-a71012323b24-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.423089 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:09:12 crc kubenswrapper[4922]: E0930 01:09:12.423550 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.560050 4922 generic.go:334] "Generic (PLEG): container finished" podID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerID="ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db" exitCode=0 Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.560114 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8xg8" event={"ID":"97d4ca23-529a-4ced-8ea7-a71012323b24","Type":"ContainerDied","Data":"ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db"} Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.560155 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8xg8" event={"ID":"97d4ca23-529a-4ced-8ea7-a71012323b24","Type":"ContainerDied","Data":"107dea1eb068a1ac47a3613069d6a07c78535cd231cef7271d7b77c79488df95"} Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.560169 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w8xg8" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.560182 4922 scope.go:117] "RemoveContainer" containerID="ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db" Sep 30 01:09:12 crc kubenswrapper[4922]: I0930 01:09:12.605004 4922 scope.go:117] "RemoveContainer" containerID="dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.049296 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97d4ca23-529a-4ced-8ea7-a71012323b24-kube-api-access-9sp59" (OuterVolumeSpecName: "kube-api-access-9sp59") pod "97d4ca23-529a-4ced-8ea7-a71012323b24" (UID: "97d4ca23-529a-4ced-8ea7-a71012323b24"). InnerVolumeSpecName "kube-api-access-9sp59". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.065558 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sp59\" (UniqueName: \"kubernetes.io/projected/97d4ca23-529a-4ced-8ea7-a71012323b24-kube-api-access-9sp59\") on node \"crc\" DevicePath \"\"" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.076720 4922 scope.go:117] "RemoveContainer" containerID="b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.187502 4922 scope.go:117] "RemoveContainer" containerID="ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db" Sep 30 01:09:13 crc kubenswrapper[4922]: E0930 01:09:13.188221 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db\": container with ID starting with ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db not found: ID does not exist" containerID="ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.188254 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db"} err="failed to get container status \"ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db\": rpc error: code = NotFound desc = could not find container \"ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db\": container with ID starting with ca535945e21623261e1bd6a1c1b226ca2be97123ee55531a24941c957f1038db not found: ID does not exist" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.188279 4922 scope.go:117] "RemoveContainer" containerID="dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8" Sep 30 01:09:13 crc kubenswrapper[4922]: E0930 01:09:13.188604 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8\": container with ID starting with dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8 not found: ID does not exist" containerID="dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.188631 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8"} err="failed to get container status \"dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8\": rpc error: code = NotFound desc = could not find container \"dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8\": container with ID starting with dafe9651353dd67562c6c47d971424d6fd3e1a276d29790d8b5823c4b07fd1c8 not found: ID does not exist" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.188648 4922 scope.go:117] "RemoveContainer" containerID="b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2" Sep 30 01:09:13 crc kubenswrapper[4922]: E0930 01:09:13.188831 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2\": container with ID starting with b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2 not found: ID does not 
exist" containerID="b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.188854 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2"} err="failed to get container status \"b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2\": rpc error: code = NotFound desc = could not find container \"b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2\": container with ID starting with b00649d48b945b7ec34f31edbeebbd84c39319d3a22c6a757d4034a7ee7ec4a2 not found: ID does not exist" Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.251380 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w8xg8"] Sep 30 01:09:13 crc kubenswrapper[4922]: I0930 01:09:13.260437 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w8xg8"] Sep 30 01:09:14 crc kubenswrapper[4922]: I0930 01:09:14.441693 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" path="/var/lib/kubelet/pods/97d4ca23-529a-4ced-8ea7-a71012323b24/volumes" Sep 30 01:09:24 crc kubenswrapper[4922]: I0930 01:09:24.423781 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:09:24 crc kubenswrapper[4922]: E0930 01:09:24.425227 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:09:37 crc kubenswrapper[4922]: I0930 01:09:37.423044 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:09:37 crc kubenswrapper[4922]: E0930 01:09:37.424683 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:09:48 crc kubenswrapper[4922]: I0930 01:09:48.422187 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:09:48 crc kubenswrapper[4922]: E0930 01:09:48.423317 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:10:02 crc kubenswrapper[4922]: I0930 01:10:02.422330 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:10:02 crc kubenswrapper[4922]: E0930 01:10:02.423685 4922 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:10:16 crc kubenswrapper[4922]: I0930 01:10:16.438876 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:10:16 crc kubenswrapper[4922]: E0930 01:10:16.442131 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:10:29 crc kubenswrapper[4922]: I0930 01:10:29.422158 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:10:29 crc kubenswrapper[4922]: E0930 01:10:29.423192 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:10:42 crc kubenswrapper[4922]: I0930 01:10:42.422935 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:10:42 crc kubenswrapper[4922]: E0930 01:10:42.424086 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:10:54 crc kubenswrapper[4922]: I0930 01:10:54.423261 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:10:54 crc kubenswrapper[4922]: E0930 01:10:54.424242 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:11:07 crc kubenswrapper[4922]: I0930 01:11:07.422139 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:11:07 crc kubenswrapper[4922]: E0930 01:11:07.423532 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:11:20 crc kubenswrapper[4922]: I0930 01:11:20.422718 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:11:20 crc kubenswrapper[4922]: E0930 01:11:20.424057 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:11:35 crc kubenswrapper[4922]: I0930 01:11:35.425017 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:11:36 crc kubenswrapper[4922]: I0930 01:11:36.450288 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"4cad47b00f52f9d838669b5b525326373baa27775879595964bddcea5ac9aba2"} Sep 30 01:11:47 crc kubenswrapper[4922]: I0930 01:11:47.603773 4922 generic.go:334] "Generic (PLEG): container finished" podID="22c6a55f-61a1-4731-b0d7-2864a91aa8ec" containerID="91723c4b05c61709fbe9cd96d3f8da96f5fa33d82185984be4159c63ffe87b2f" exitCode=0 Sep 30 01:11:47 crc kubenswrapper[4922]: I0930 01:11:47.603842 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" event={"ID":"22c6a55f-61a1-4731-b0d7-2864a91aa8ec","Type":"ContainerDied","Data":"91723c4b05c61709fbe9cd96d3f8da96f5fa33d82185984be4159c63ffe87b2f"} Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.104321 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.119494 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-inventory\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.119636 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-1\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.119678 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ceph\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.119729 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-1\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.120812 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-0\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.120891 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-combined-ca-bundle\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.120947 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ssh-key\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.120990 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-0\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.121031 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-1\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.121085 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" 
(UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-0\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.121119 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ndb4\" (UniqueName: \"kubernetes.io/projected/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-kube-api-access-2ndb4\") pod \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\" (UID: \"22c6a55f-61a1-4731-b0d7-2864a91aa8ec\") " Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.128511 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.133975 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ceph" (OuterVolumeSpecName: "ceph") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.134949 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-kube-api-access-2ndb4" (OuterVolumeSpecName: "kube-api-access-2ndb4") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "kube-api-access-2ndb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.176575 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.176936 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-inventory" (OuterVolumeSpecName: "inventory") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.180886 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.182249 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.188955 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.197355 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.200770 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.207538 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "22c6a55f-61a1-4731-b0d7-2864a91aa8ec" (UID: "22c6a55f-61a1-4731-b0d7-2864a91aa8ec"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223635 4922 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223676 4922 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223690 4922 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ceph\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223703 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223720 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223731 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223744 4922 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223755 4922 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223766 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223777 4922 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.223788 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ndb4\" (UniqueName: \"kubernetes.io/projected/22c6a55f-61a1-4731-b0d7-2864a91aa8ec-kube-api-access-2ndb4\") on node \"crc\" DevicePath \"\"" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.633461 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" event={"ID":"22c6a55f-61a1-4731-b0d7-2864a91aa8ec","Type":"ContainerDied","Data":"6f95ff284a9c477b2d7e3019dd8866dc4de97c728002090bbaa0f1722527c288"} Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.633689 4922 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f95ff284a9c477b2d7e3019dd8866dc4de97c728002090bbaa0f1722527c288" Sep 30 01:11:49 crc kubenswrapper[4922]: I0930 01:11:49.633525 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv" Sep 30 01:13:58 crc kubenswrapper[4922]: I0930 01:13:58.912505 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:13:58 crc kubenswrapper[4922]: I0930 01:13:58.913320 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:14:07 crc kubenswrapper[4922]: I0930 01:14:07.806701 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Sep 30 01:14:07 crc kubenswrapper[4922]: I0930 01:14:07.807637 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="0eaa79ab-916d-4190-a6cf-fc62fb86f89a" containerName="adoption" containerID="cri-o://beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7" gracePeriod=30 Sep 30 01:14:28 crc kubenswrapper[4922]: I0930 01:14:28.912828 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:14:28 crc kubenswrapper[4922]: I0930 01:14:28.913453 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.415954 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.539835 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vx6j\" (UniqueName: \"kubernetes.io/projected/0eaa79ab-916d-4190-a6cf-fc62fb86f89a-kube-api-access-7vx6j\") pod \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.540948 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\") pod \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\" (UID: \"0eaa79ab-916d-4190-a6cf-fc62fb86f89a\") " Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.546275 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eaa79ab-916d-4190-a6cf-fc62fb86f89a-kube-api-access-7vx6j" (OuterVolumeSpecName: "kube-api-access-7vx6j") pod "0eaa79ab-916d-4190-a6cf-fc62fb86f89a" (UID: "0eaa79ab-916d-4190-a6cf-fc62fb86f89a"). InnerVolumeSpecName "kube-api-access-7vx6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.563341 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3" (OuterVolumeSpecName: "mariadb-data") pod "0eaa79ab-916d-4190-a6cf-fc62fb86f89a" (UID: "0eaa79ab-916d-4190-a6cf-fc62fb86f89a"). InnerVolumeSpecName "pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.643505 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vx6j\" (UniqueName: \"kubernetes.io/projected/0eaa79ab-916d-4190-a6cf-fc62fb86f89a-kube-api-access-7vx6j\") on node \"crc\" DevicePath \"\"" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.643828 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\") on node \"crc\" " Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.683816 4922 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.683959 4922 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3") on node "crc" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.745802 4922 reconciler_common.go:293] "Volume detached for volume \"pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d8b4fe-fa52-48ca-95d6-6438cdf246b3\") on node \"crc\" DevicePath \"\"" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.873675 4922 generic.go:334] "Generic (PLEG): container finished" podID="0eaa79ab-916d-4190-a6cf-fc62fb86f89a" containerID="beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7" exitCode=137 Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.873757 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"0eaa79ab-916d-4190-a6cf-fc62fb86f89a","Type":"ContainerDied","Data":"beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7"} Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.873802 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"0eaa79ab-916d-4190-a6cf-fc62fb86f89a","Type":"ContainerDied","Data":"dc09b6625b6e72f4bd495cac7861f139e54c5ed05aaadc1015212799976a89fb"} Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.873854 4922 scope.go:117] "RemoveContainer" containerID="beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.874010 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.908110 4922 scope.go:117] "RemoveContainer" containerID="beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7" Sep 30 01:14:38 crc kubenswrapper[4922]: E0930 01:14:38.911478 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7\": container with ID starting with beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7 not found: ID does not exist" containerID="beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.911546 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7"} err="failed to get container status \"beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7\": rpc error: code = NotFound desc = could not find container \"beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7\": container with ID starting with beef6d21ae176e5d8f529b6aa39fdfce2a4a10c76f4ced9d4972a06197ce45a7 not found: ID does not exist" Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.934477 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Sep 30 01:14:38 crc kubenswrapper[4922]: I0930 01:14:38.947096 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"] Sep 30 01:14:39 crc kubenswrapper[4922]: I0930 01:14:39.731503 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Sep 30 01:14:39 crc 
kubenswrapper[4922]: I0930 01:14:39.732157 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="ec894f89-6b62-4dee-80df-c4ed29e9d117" containerName="adoption" containerID="cri-o://86ad15efac297783eeab263e09ee7579ee7da29b56a86ceb84e37ded3d0a3156" gracePeriod=30 Sep 30 01:14:40 crc kubenswrapper[4922]: I0930 01:14:40.435027 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eaa79ab-916d-4190-a6cf-fc62fb86f89a" path="/var/lib/kubelet/pods/0eaa79ab-916d-4190-a6cf-fc62fb86f89a/volumes" Sep 30 01:14:58 crc kubenswrapper[4922]: I0930 01:14:58.912661 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:14:58 crc kubenswrapper[4922]: I0930 01:14:58.913450 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:14:58 crc kubenswrapper[4922]: I0930 01:14:58.913530 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 01:14:58 crc kubenswrapper[4922]: I0930 01:14:58.914698 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4cad47b00f52f9d838669b5b525326373baa27775879595964bddcea5ac9aba2"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:14:58 crc kubenswrapper[4922]: I0930 01:14:58.914800 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://4cad47b00f52f9d838669b5b525326373baa27775879595964bddcea5ac9aba2" gracePeriod=600 Sep 30 01:14:59 crc kubenswrapper[4922]: I0930 01:14:59.150312 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="4cad47b00f52f9d838669b5b525326373baa27775879595964bddcea5ac9aba2" exitCode=0 Sep 30 01:14:59 crc kubenswrapper[4922]: I0930 01:14:59.150375 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"4cad47b00f52f9d838669b5b525326373baa27775879595964bddcea5ac9aba2"} Sep 30 01:14:59 crc kubenswrapper[4922]: I0930 01:14:59.150460 4922 scope.go:117] "RemoveContainer" containerID="e1f4c4eec81e954438764821e3a0e172dd7235585b76cd8036f672b5160267f3" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.180475 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff"} Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.219857 4922 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp"] Sep 30 01:15:00 crc kubenswrapper[4922]: E0930 01:15:00.220514 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="registry-server" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220538 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="registry-server" Sep 30 01:15:00 crc kubenswrapper[4922]: E0930 01:15:00.220565 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eaa79ab-916d-4190-a6cf-fc62fb86f89a" containerName="adoption" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220576 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eaa79ab-916d-4190-a6cf-fc62fb86f89a" containerName="adoption" Sep 30 01:15:00 crc kubenswrapper[4922]: E0930 01:15:00.220600 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="extract-content" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220608 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="extract-content" Sep 30 01:15:00 crc kubenswrapper[4922]: E0930 01:15:00.220622 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="extract-utilities" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220630 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="extract-utilities" Sep 30 01:15:00 crc kubenswrapper[4922]: E0930 01:15:00.220664 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22c6a55f-61a1-4731-b0d7-2864a91aa8ec" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220673 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="22c6a55f-61a1-4731-b0d7-2864a91aa8ec" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220924 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="22c6a55f-61a1-4731-b0d7-2864a91aa8ec" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220951 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="97d4ca23-529a-4ced-8ea7-a71012323b24" containerName="registry-server" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.220970 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eaa79ab-916d-4190-a6cf-fc62fb86f89a" containerName="adoption" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.221974 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.225126 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.225957 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.244111 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp"] Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.396135 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l47kz\" (UniqueName: \"kubernetes.io/projected/b7586c93-c376-431b-83e3-f7cda6728bba-kube-api-access-l47kz\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.396224 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7586c93-c376-431b-83e3-f7cda6728bba-config-volume\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.397776 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7586c93-c376-431b-83e3-f7cda6728bba-secret-volume\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.500681 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l47kz\" (UniqueName: \"kubernetes.io/projected/b7586c93-c376-431b-83e3-f7cda6728bba-kube-api-access-l47kz\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.503766 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7586c93-c376-431b-83e3-f7cda6728bba-config-volume\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.503854 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7586c93-c376-431b-83e3-f7cda6728bba-secret-volume\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.505323 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7586c93-c376-431b-83e3-f7cda6728bba-config-volume\") pod 
\"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.515898 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7586c93-c376-431b-83e3-f7cda6728bba-secret-volume\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.531214 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l47kz\" (UniqueName: \"kubernetes.io/projected/b7586c93-c376-431b-83e3-f7cda6728bba-kube-api-access-l47kz\") pod \"collect-profiles-29319915-xznwp\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:00 crc kubenswrapper[4922]: I0930 01:15:00.560824 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:01 crc kubenswrapper[4922]: I0930 01:15:01.111305 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp"] Sep 30 01:15:01 crc kubenswrapper[4922]: I0930 01:15:01.196464 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" event={"ID":"b7586c93-c376-431b-83e3-f7cda6728bba","Type":"ContainerStarted","Data":"4f866b697c39d41b0d536275cbd3872ced650a89d006720fe00b1ab2f4a674ac"} Sep 30 01:15:02 crc kubenswrapper[4922]: I0930 01:15:02.213255 4922 generic.go:334] "Generic (PLEG): container finished" podID="b7586c93-c376-431b-83e3-f7cda6728bba" containerID="8e95b684bb10b6039b48a9a86e97d9016126332c6b15f6647fdf7f7876a3a949" exitCode=0 Sep 30 01:15:02 crc kubenswrapper[4922]: I0930 01:15:02.213452 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" event={"ID":"b7586c93-c376-431b-83e3-f7cda6728bba","Type":"ContainerDied","Data":"8e95b684bb10b6039b48a9a86e97d9016126332c6b15f6647fdf7f7876a3a949"} Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.691209 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.792108 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l47kz\" (UniqueName: \"kubernetes.io/projected/b7586c93-c376-431b-83e3-f7cda6728bba-kube-api-access-l47kz\") pod \"b7586c93-c376-431b-83e3-f7cda6728bba\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.792203 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7586c93-c376-431b-83e3-f7cda6728bba-secret-volume\") pod \"b7586c93-c376-431b-83e3-f7cda6728bba\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.792406 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7586c93-c376-431b-83e3-f7cda6728bba-config-volume\") pod \"b7586c93-c376-431b-83e3-f7cda6728bba\" (UID: \"b7586c93-c376-431b-83e3-f7cda6728bba\") " Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.793086 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7586c93-c376-431b-83e3-f7cda6728bba-config-volume" (OuterVolumeSpecName: "config-volume") pod "b7586c93-c376-431b-83e3-f7cda6728bba" (UID: "b7586c93-c376-431b-83e3-f7cda6728bba"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.799275 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7586c93-c376-431b-83e3-f7cda6728bba-kube-api-access-l47kz" (OuterVolumeSpecName: "kube-api-access-l47kz") pod "b7586c93-c376-431b-83e3-f7cda6728bba" (UID: "b7586c93-c376-431b-83e3-f7cda6728bba"). InnerVolumeSpecName "kube-api-access-l47kz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.801162 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7586c93-c376-431b-83e3-f7cda6728bba-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b7586c93-c376-431b-83e3-f7cda6728bba" (UID: "b7586c93-c376-431b-83e3-f7cda6728bba"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.895240 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l47kz\" (UniqueName: \"kubernetes.io/projected/b7586c93-c376-431b-83e3-f7cda6728bba-kube-api-access-l47kz\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.895295 4922 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7586c93-c376-431b-83e3-f7cda6728bba-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:03 crc kubenswrapper[4922]: I0930 01:15:03.895314 4922 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7586c93-c376-431b-83e3-f7cda6728bba-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:04 crc kubenswrapper[4922]: I0930 01:15:04.240758 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" event={"ID":"b7586c93-c376-431b-83e3-f7cda6728bba","Type":"ContainerDied","Data":"4f866b697c39d41b0d536275cbd3872ced650a89d006720fe00b1ab2f4a674ac"} Sep 30 01:15:04 crc kubenswrapper[4922]: I0930 01:15:04.240956 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f866b697c39d41b0d536275cbd3872ced650a89d006720fe00b1ab2f4a674ac" Sep 30 01:15:04 crc kubenswrapper[4922]: I0930 01:15:04.240836 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319915-xznwp" Sep 30 01:15:04 crc kubenswrapper[4922]: I0930 01:15:04.777030 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78"] Sep 30 01:15:04 crc kubenswrapper[4922]: I0930 01:15:04.794597 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319870-ghq78"] Sep 30 01:15:06 crc kubenswrapper[4922]: I0930 01:15:06.438136 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c858009c-8094-4aec-925d-fa1cc18fb120" path="/var/lib/kubelet/pods/c858009c-8094-4aec-925d-fa1cc18fb120/volumes" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.325786 4922 generic.go:334] "Generic (PLEG): container finished" podID="ec894f89-6b62-4dee-80df-c4ed29e9d117" containerID="86ad15efac297783eeab263e09ee7579ee7da29b56a86ceb84e37ded3d0a3156" exitCode=137 Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.325933 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"ec894f89-6b62-4dee-80df-c4ed29e9d117","Type":"ContainerDied","Data":"86ad15efac297783eeab263e09ee7579ee7da29b56a86ceb84e37ded3d0a3156"} Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.326520 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"ec894f89-6b62-4dee-80df-c4ed29e9d117","Type":"ContainerDied","Data":"3e045e483e590adc65f0cdd64e179b739550a4b8b85d7ff96b5d95dbae7eab37"} Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.326542 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e045e483e590adc65f0cdd64e179b739550a4b8b85d7ff96b5d95dbae7eab37" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.373475 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-copy-data" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.568906 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/ec894f89-6b62-4dee-80df-c4ed29e9d117-ovn-data-cert\") pod \"ec894f89-6b62-4dee-80df-c4ed29e9d117\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.568973 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcp5l\" (UniqueName: \"kubernetes.io/projected/ec894f89-6b62-4dee-80df-c4ed29e9d117-kube-api-access-rcp5l\") pod \"ec894f89-6b62-4dee-80df-c4ed29e9d117\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.570221 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\") pod \"ec894f89-6b62-4dee-80df-c4ed29e9d117\" (UID: \"ec894f89-6b62-4dee-80df-c4ed29e9d117\") " Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.576442 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec894f89-6b62-4dee-80df-c4ed29e9d117-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "ec894f89-6b62-4dee-80df-c4ed29e9d117" (UID: "ec894f89-6b62-4dee-80df-c4ed29e9d117"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.577624 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec894f89-6b62-4dee-80df-c4ed29e9d117-kube-api-access-rcp5l" (OuterVolumeSpecName: "kube-api-access-rcp5l") pod "ec894f89-6b62-4dee-80df-c4ed29e9d117" (UID: "ec894f89-6b62-4dee-80df-c4ed29e9d117"). InnerVolumeSpecName "kube-api-access-rcp5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.591376 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d" (OuterVolumeSpecName: "ovn-data") pod "ec894f89-6b62-4dee-80df-c4ed29e9d117" (UID: "ec894f89-6b62-4dee-80df-c4ed29e9d117"). InnerVolumeSpecName "pvc-0e903073-0261-4766-a534-d25ca1b3fa6d". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.673749 4922 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/ec894f89-6b62-4dee-80df-c4ed29e9d117-ovn-data-cert\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.674156 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcp5l\" (UniqueName: \"kubernetes.io/projected/ec894f89-6b62-4dee-80df-c4ed29e9d117-kube-api-access-rcp5l\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.674197 4922 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\") on node \"crc\" " Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.702005 4922 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping UnmountDevice... Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.702146 4922 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-0e903073-0261-4766-a534-d25ca1b3fa6d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d") on node "crc" Sep 30 01:15:10 crc kubenswrapper[4922]: I0930 01:15:10.775814 4922 reconciler_common.go:293] "Volume detached for volume \"pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-0e903073-0261-4766-a534-d25ca1b3fa6d\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:11 crc kubenswrapper[4922]: I0930 01:15:11.338592 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data" Sep 30 01:15:11 crc kubenswrapper[4922]: I0930 01:15:11.397478 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"] Sep 30 01:15:11 crc kubenswrapper[4922]: I0930 01:15:11.407942 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"] Sep 30 01:15:12 crc kubenswrapper[4922]: I0930 01:15:12.436594 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec894f89-6b62-4dee-80df-c4ed29e9d117" path="/var/lib/kubelet/pods/ec894f89-6b62-4dee-80df-c4ed29e9d117/volumes" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.773532 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fx9pg"] Sep 30 01:15:20 crc kubenswrapper[4922]: E0930 01:15:20.774967 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec894f89-6b62-4dee-80df-c4ed29e9d117" containerName="adoption" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.774990 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec894f89-6b62-4dee-80df-c4ed29e9d117" containerName="adoption" Sep 30 01:15:20 crc kubenswrapper[4922]: E0930 01:15:20.775023 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7586c93-c376-431b-83e3-f7cda6728bba" containerName="collect-profiles" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.775037 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7586c93-c376-431b-83e3-f7cda6728bba" containerName="collect-profiles" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.775499 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7586c93-c376-431b-83e3-f7cda6728bba" containerName="collect-profiles" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.775538 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec894f89-6b62-4dee-80df-c4ed29e9d117" containerName="adoption" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.780857 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.797055 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx9pg"] Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.944588 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-utilities\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.945341 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlx9g\" (UniqueName: \"kubernetes.io/projected/1cb5fdd0-89b3-4c0c-837e-83ded3015481-kube-api-access-wlx9g\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:20 crc kubenswrapper[4922]: I0930 01:15:20.945543 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-catalog-content\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.047213 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-utilities\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.047313 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlx9g\" (UniqueName: \"kubernetes.io/projected/1cb5fdd0-89b3-4c0c-837e-83ded3015481-kube-api-access-wlx9g\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.047365 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-catalog-content\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.047725 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-utilities\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.047853 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-catalog-content\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.067645 4922 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wlx9g\" (UniqueName: \"kubernetes.io/projected/1cb5fdd0-89b3-4c0c-837e-83ded3015481-kube-api-access-wlx9g\") pod \"redhat-marketplace-fx9pg\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.107536 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:21 crc kubenswrapper[4922]: I0930 01:15:21.585682 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx9pg"] Sep 30 01:15:21 crc kubenswrapper[4922]: W0930 01:15:21.588773 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cb5fdd0_89b3_4c0c_837e_83ded3015481.slice/crio-e2125f412e5ae24dd7241c4fdc18607aa7758beb2e6bc24d5c2250952d55b1f7 WatchSource:0}: Error finding container e2125f412e5ae24dd7241c4fdc18607aa7758beb2e6bc24d5c2250952d55b1f7: Status 404 returned error can't find the container with id e2125f412e5ae24dd7241c4fdc18607aa7758beb2e6bc24d5c2250952d55b1f7 Sep 30 01:15:22 crc kubenswrapper[4922]: I0930 01:15:22.494296 4922 generic.go:334] "Generic (PLEG): container finished" podID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerID="5a14f74d267184d96118554f169c29fdea33acd86a4020d3cadd58d246fe13bf" exitCode=0 Sep 30 01:15:22 crc kubenswrapper[4922]: I0930 01:15:22.494413 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx9pg" event={"ID":"1cb5fdd0-89b3-4c0c-837e-83ded3015481","Type":"ContainerDied","Data":"5a14f74d267184d96118554f169c29fdea33acd86a4020d3cadd58d246fe13bf"} Sep 30 01:15:22 crc kubenswrapper[4922]: I0930 01:15:22.495009 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx9pg" event={"ID":"1cb5fdd0-89b3-4c0c-837e-83ded3015481","Type":"ContainerStarted","Data":"e2125f412e5ae24dd7241c4fdc18607aa7758beb2e6bc24d5c2250952d55b1f7"} Sep 30 01:15:22 crc kubenswrapper[4922]: I0930 01:15:22.497370 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:15:24 crc kubenswrapper[4922]: I0930 01:15:24.527891 4922 generic.go:334] "Generic (PLEG): container finished" podID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerID="2f1b87c24ee9649e3d4fb23a5edfd277334358526301eab8349a34fc92470bbe" exitCode=0 Sep 30 01:15:24 crc kubenswrapper[4922]: I0930 01:15:24.528011 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx9pg" event={"ID":"1cb5fdd0-89b3-4c0c-837e-83ded3015481","Type":"ContainerDied","Data":"2f1b87c24ee9649e3d4fb23a5edfd277334358526301eab8349a34fc92470bbe"} Sep 30 01:15:25 crc kubenswrapper[4922]: I0930 01:15:25.545912 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx9pg" event={"ID":"1cb5fdd0-89b3-4c0c-837e-83ded3015481","Type":"ContainerStarted","Data":"3ebf0237ef75264ed1bb79a90f96ff6382db07c88caa672878d61ecdbd9a659a"} Sep 30 01:15:25 crc kubenswrapper[4922]: I0930 01:15:25.582520 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fx9pg" podStartSLOduration=3.136186097 podStartE2EDuration="5.582489293s" podCreationTimestamp="2025-09-30 01:15:20 +0000 UTC" firstStartedPulling="2025-09-30 01:15:22.497121835 +0000 UTC m=+10126.807410648" 
lastFinishedPulling="2025-09-30 01:15:24.943424991 +0000 UTC m=+10129.253713844" observedRunningTime="2025-09-30 01:15:25.577337845 +0000 UTC m=+10129.887626668" watchObservedRunningTime="2025-09-30 01:15:25.582489293 +0000 UTC m=+10129.892778136" Sep 30 01:15:31 crc kubenswrapper[4922]: I0930 01:15:31.108541 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:31 crc kubenswrapper[4922]: I0930 01:15:31.109383 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:31 crc kubenswrapper[4922]: I0930 01:15:31.203361 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:31 crc kubenswrapper[4922]: I0930 01:15:31.696093 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:31 crc kubenswrapper[4922]: I0930 01:15:31.780138 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx9pg"] Sep 30 01:15:33 crc kubenswrapper[4922]: I0930 01:15:33.675606 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fx9pg" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="registry-server" containerID="cri-o://3ebf0237ef75264ed1bb79a90f96ff6382db07c88caa672878d61ecdbd9a659a" gracePeriod=2 Sep 30 01:15:34 crc kubenswrapper[4922]: I0930 01:15:34.687159 4922 generic.go:334] "Generic (PLEG): container finished" podID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerID="3ebf0237ef75264ed1bb79a90f96ff6382db07c88caa672878d61ecdbd9a659a" exitCode=0 Sep 30 01:15:34 crc kubenswrapper[4922]: I0930 01:15:34.687202 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx9pg" event={"ID":"1cb5fdd0-89b3-4c0c-837e-83ded3015481","Type":"ContainerDied","Data":"3ebf0237ef75264ed1bb79a90f96ff6382db07c88caa672878d61ecdbd9a659a"} Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.560346 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.672088 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-catalog-content\") pod \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.672540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-utilities\") pod \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.672599 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlx9g\" (UniqueName: \"kubernetes.io/projected/1cb5fdd0-89b3-4c0c-837e-83ded3015481-kube-api-access-wlx9g\") pod \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\" (UID: \"1cb5fdd0-89b3-4c0c-837e-83ded3015481\") " Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.673439 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-utilities" (OuterVolumeSpecName: "utilities") pod "1cb5fdd0-89b3-4c0c-837e-83ded3015481" (UID: "1cb5fdd0-89b3-4c0c-837e-83ded3015481"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.679149 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cb5fdd0-89b3-4c0c-837e-83ded3015481-kube-api-access-wlx9g" (OuterVolumeSpecName: "kube-api-access-wlx9g") pod "1cb5fdd0-89b3-4c0c-837e-83ded3015481" (UID: "1cb5fdd0-89b3-4c0c-837e-83ded3015481"). InnerVolumeSpecName "kube-api-access-wlx9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.685423 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1cb5fdd0-89b3-4c0c-837e-83ded3015481" (UID: "1cb5fdd0-89b3-4c0c-837e-83ded3015481"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.701879 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fx9pg" event={"ID":"1cb5fdd0-89b3-4c0c-837e-83ded3015481","Type":"ContainerDied","Data":"e2125f412e5ae24dd7241c4fdc18607aa7758beb2e6bc24d5c2250952d55b1f7"} Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.701928 4922 scope.go:117] "RemoveContainer" containerID="3ebf0237ef75264ed1bb79a90f96ff6382db07c88caa672878d61ecdbd9a659a" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.701944 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fx9pg" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.749301 4922 scope.go:117] "RemoveContainer" containerID="2f1b87c24ee9649e3d4fb23a5edfd277334358526301eab8349a34fc92470bbe" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.753964 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx9pg"] Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.765629 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fx9pg"] Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.775537 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.775567 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1cb5fdd0-89b3-4c0c-837e-83ded3015481-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.775580 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlx9g\" (UniqueName: \"kubernetes.io/projected/1cb5fdd0-89b3-4c0c-837e-83ded3015481-kube-api-access-wlx9g\") on node \"crc\" DevicePath \"\"" Sep 30 01:15:35 crc kubenswrapper[4922]: I0930 01:15:35.780120 4922 scope.go:117] "RemoveContainer" containerID="5a14f74d267184d96118554f169c29fdea33acd86a4020d3cadd58d246fe13bf" Sep 30 01:15:36 crc kubenswrapper[4922]: I0930 01:15:36.446820 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" path="/var/lib/kubelet/pods/1cb5fdd0-89b3-4c0c-837e-83ded3015481/volumes" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.135885 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dkftn"] Sep 30 01:15:58 crc kubenswrapper[4922]: E0930 01:15:58.137362 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="extract-utilities" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.137386 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="extract-utilities" Sep 30 01:15:58 crc kubenswrapper[4922]: E0930 01:15:58.137451 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="extract-content" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.137468 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="extract-content" Sep 30 01:15:58 crc kubenswrapper[4922]: E0930 01:15:58.137505 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="registry-server" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.137520 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="registry-server" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.137950 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cb5fdd0-89b3-4c0c-837e-83ded3015481" containerName="registry-server" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.142171 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.162860 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dkftn"] Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.271315 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-utilities\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.271418 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-catalog-content\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.271483 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvn57\" (UniqueName: \"kubernetes.io/projected/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-kube-api-access-rvn57\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.373625 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-utilities\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.373699 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-catalog-content\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.373763 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvn57\" (UniqueName: \"kubernetes.io/projected/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-kube-api-access-rvn57\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.375199 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-catalog-content\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.375831 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-utilities\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.396700 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rvn57\" (UniqueName: \"kubernetes.io/projected/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-kube-api-access-rvn57\") pod \"community-operators-dkftn\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:58 crc kubenswrapper[4922]: I0930 01:15:58.476742 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:15:59 crc kubenswrapper[4922]: I0930 01:15:59.035240 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dkftn"] Sep 30 01:15:59 crc kubenswrapper[4922]: W0930 01:15:59.038815 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac5dcb68_ff2e_4bd9_ba8d_8e6a21c51ea3.slice/crio-dcd38de81e4b81b234bdaac4bb38b7f59e4d068555f2ca2971b53e276e24e28d WatchSource:0}: Error finding container dcd38de81e4b81b234bdaac4bb38b7f59e4d068555f2ca2971b53e276e24e28d: Status 404 returned error can't find the container with id dcd38de81e4b81b234bdaac4bb38b7f59e4d068555f2ca2971b53e276e24e28d Sep 30 01:16:00 crc kubenswrapper[4922]: I0930 01:16:00.033254 4922 generic.go:334] "Generic (PLEG): container finished" podID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerID="34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb" exitCode=0 Sep 30 01:16:00 crc kubenswrapper[4922]: I0930 01:16:00.033781 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkftn" event={"ID":"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3","Type":"ContainerDied","Data":"34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb"} Sep 30 01:16:00 crc kubenswrapper[4922]: I0930 01:16:00.034181 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkftn" event={"ID":"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3","Type":"ContainerStarted","Data":"dcd38de81e4b81b234bdaac4bb38b7f59e4d068555f2ca2971b53e276e24e28d"} Sep 30 01:16:00 crc kubenswrapper[4922]: I0930 01:16:00.336836 4922 scope.go:117] "RemoveContainer" containerID="236058568fa50ce7e0e1bcba3135a4823f93efe90c7a11f8dfd63a1aa8b1bd8d" Sep 30 01:16:00 crc kubenswrapper[4922]: I0930 01:16:00.373619 4922 scope.go:117] "RemoveContainer" containerID="86ad15efac297783eeab263e09ee7579ee7da29b56a86ceb84e37ded3d0a3156" Sep 30 01:16:01 crc kubenswrapper[4922]: I0930 01:16:01.050813 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkftn" event={"ID":"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3","Type":"ContainerStarted","Data":"141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf"} Sep 30 01:16:03 crc kubenswrapper[4922]: I0930 01:16:03.085648 4922 generic.go:334] "Generic (PLEG): container finished" podID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerID="141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf" exitCode=0 Sep 30 01:16:03 crc kubenswrapper[4922]: I0930 01:16:03.085735 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkftn" event={"ID":"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3","Type":"ContainerDied","Data":"141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf"} Sep 30 01:16:04 crc kubenswrapper[4922]: I0930 01:16:04.099891 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkftn" 
event={"ID":"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3","Type":"ContainerStarted","Data":"aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384"} Sep 30 01:16:04 crc kubenswrapper[4922]: I0930 01:16:04.122491 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dkftn" podStartSLOduration=2.664415076 podStartE2EDuration="6.122459258s" podCreationTimestamp="2025-09-30 01:15:58 +0000 UTC" firstStartedPulling="2025-09-30 01:16:00.037058494 +0000 UTC m=+10164.347347337" lastFinishedPulling="2025-09-30 01:16:03.495102696 +0000 UTC m=+10167.805391519" observedRunningTime="2025-09-30 01:16:04.116297886 +0000 UTC m=+10168.426586699" watchObservedRunningTime="2025-09-30 01:16:04.122459258 +0000 UTC m=+10168.432748071" Sep 30 01:16:08 crc kubenswrapper[4922]: I0930 01:16:08.477268 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:16:08 crc kubenswrapper[4922]: I0930 01:16:08.480120 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:16:08 crc kubenswrapper[4922]: I0930 01:16:08.558741 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:16:09 crc kubenswrapper[4922]: I0930 01:16:09.227500 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:16:09 crc kubenswrapper[4922]: I0930 01:16:09.279083 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dkftn"] Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.199690 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dkftn" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="registry-server" containerID="cri-o://aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384" gracePeriod=2 Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.825086 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.885456 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvn57\" (UniqueName: \"kubernetes.io/projected/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-kube-api-access-rvn57\") pod \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.885754 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-catalog-content\") pod \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.885841 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-utilities\") pod \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\" (UID: \"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3\") " Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.886475 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-utilities" (OuterVolumeSpecName: "utilities") pod "ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" (UID: "ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.891839 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-kube-api-access-rvn57" (OuterVolumeSpecName: "kube-api-access-rvn57") pod "ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" (UID: "ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3"). InnerVolumeSpecName "kube-api-access-rvn57". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.948082 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" (UID: "ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.989222 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvn57\" (UniqueName: \"kubernetes.io/projected/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-kube-api-access-rvn57\") on node \"crc\" DevicePath \"\"" Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.989332 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:16:11 crc kubenswrapper[4922]: I0930 01:16:11.989429 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.212481 4922 generic.go:334] "Generic (PLEG): container finished" podID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerID="aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384" exitCode=0 Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.212544 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkftn" event={"ID":"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3","Type":"ContainerDied","Data":"aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384"} Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.212571 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dkftn" event={"ID":"ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3","Type":"ContainerDied","Data":"dcd38de81e4b81b234bdaac4bb38b7f59e4d068555f2ca2971b53e276e24e28d"} Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.212587 4922 scope.go:117] "RemoveContainer" containerID="aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.212709 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dkftn" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.251801 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dkftn"] Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.266893 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dkftn"] Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.279925 4922 scope.go:117] "RemoveContainer" containerID="141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.310444 4922 scope.go:117] "RemoveContainer" containerID="34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.359732 4922 scope.go:117] "RemoveContainer" containerID="aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384" Sep 30 01:16:12 crc kubenswrapper[4922]: E0930 01:16:12.360524 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384\": container with ID starting with aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384 not found: ID does not exist" containerID="aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.360604 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384"} err="failed to get container status \"aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384\": rpc error: code = NotFound desc = could not find container \"aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384\": container with ID starting with aaf8bc1d9a298c80aaf0dedb9b6431995face4f449b76b5b00235b17e4e68384 not found: ID does not exist" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.360630 4922 scope.go:117] "RemoveContainer" containerID="141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf" Sep 30 01:16:12 crc kubenswrapper[4922]: E0930 01:16:12.361028 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf\": container with ID starting with 141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf not found: ID does not exist" containerID="141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.361093 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf"} err="failed to get container status \"141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf\": rpc error: code = NotFound desc = could not find container \"141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf\": container with ID starting with 141cfab23aab8b3b1dddd6b4d1827da4464b2a0714689e927cca1c479615afdf not found: ID does not exist" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.361145 4922 scope.go:117] "RemoveContainer" containerID="34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb" Sep 30 01:16:12 crc kubenswrapper[4922]: E0930 01:16:12.361777 4922 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb\": container with ID starting with 34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb not found: ID does not exist" containerID="34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.361822 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb"} err="failed to get container status \"34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb\": rpc error: code = NotFound desc = could not find container \"34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb\": container with ID starting with 34281c4da97de4b1e0069a5ee58a000c1039645baef1d37adb395adc4cf2eeeb not found: ID does not exist" Sep 30 01:16:12 crc kubenswrapper[4922]: I0930 01:16:12.438243 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" path="/var/lib/kubelet/pods/ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3/volumes" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.150823 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mh2ts/must-gather-9fwl7"] Sep 30 01:16:23 crc kubenswrapper[4922]: E0930 01:16:23.152916 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="extract-utilities" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.152949 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="extract-utilities" Sep 30 01:16:23 crc kubenswrapper[4922]: E0930 01:16:23.152975 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="registry-server" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.152983 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="registry-server" Sep 30 01:16:23 crc kubenswrapper[4922]: E0930 01:16:23.153092 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="extract-content" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.153187 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="extract-content" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.153671 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac5dcb68-ff2e-4bd9-ba8d-8e6a21c51ea3" containerName="registry-server" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.154858 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.183269 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mh2ts"/"openshift-service-ca.crt" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.184449 4922 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mh2ts"/"kube-root-ca.crt" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.184781 4922 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-mh2ts"/"default-dockercfg-rgbgx" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.227801 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mh2ts/must-gather-9fwl7"] Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.266524 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj6l8\" (UniqueName: \"kubernetes.io/projected/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-kube-api-access-bj6l8\") pod \"must-gather-9fwl7\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.266692 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-must-gather-output\") pod \"must-gather-9fwl7\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.368920 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj6l8\" (UniqueName: \"kubernetes.io/projected/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-kube-api-access-bj6l8\") pod \"must-gather-9fwl7\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.369072 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-must-gather-output\") pod \"must-gather-9fwl7\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.369560 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-must-gather-output\") pod \"must-gather-9fwl7\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.393348 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj6l8\" (UniqueName: \"kubernetes.io/projected/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-kube-api-access-bj6l8\") pod \"must-gather-9fwl7\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:23 crc kubenswrapper[4922]: I0930 01:16:23.514329 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:16:24 crc kubenswrapper[4922]: I0930 01:16:24.062719 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mh2ts/must-gather-9fwl7"] Sep 30 01:16:24 crc kubenswrapper[4922]: I0930 01:16:24.396298 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" event={"ID":"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a","Type":"ContainerStarted","Data":"b0e66e259a0d494592ddee7b085c745177a1e5279fcfa18532d1610911a0454e"} Sep 30 01:16:29 crc kubenswrapper[4922]: I0930 01:16:29.461862 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" event={"ID":"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a","Type":"ContainerStarted","Data":"e5607868a2eca84ce8942ade6eb02cc2f181556921559b78e4fa73b8b4de97d4"} Sep 30 01:16:29 crc kubenswrapper[4922]: I0930 01:16:29.462546 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" event={"ID":"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a","Type":"ContainerStarted","Data":"a56f00a5159702a4d224544fdf1f13b607b754cd2948510da59a69acb99d7ee9"} Sep 30 01:16:29 crc kubenswrapper[4922]: I0930 01:16:29.498721 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" podStartSLOduration=2.229472682 podStartE2EDuration="6.498705162s" podCreationTimestamp="2025-09-30 01:16:23 +0000 UTC" firstStartedPulling="2025-09-30 01:16:24.076646259 +0000 UTC m=+10188.386935082" lastFinishedPulling="2025-09-30 01:16:28.345878709 +0000 UTC m=+10192.656167562" observedRunningTime="2025-09-30 01:16:29.49336283 +0000 UTC m=+10193.803651653" watchObservedRunningTime="2025-09-30 01:16:29.498705162 +0000 UTC m=+10193.808993985" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.137546 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-p55pn"] Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.139307 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.290604 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0191d790-4f49-40d1-9eb8-e8b7e6620328-host\") pod \"crc-debug-p55pn\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.291107 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2clgr\" (UniqueName: \"kubernetes.io/projected/0191d790-4f49-40d1-9eb8-e8b7e6620328-kube-api-access-2clgr\") pod \"crc-debug-p55pn\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.393141 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2clgr\" (UniqueName: \"kubernetes.io/projected/0191d790-4f49-40d1-9eb8-e8b7e6620328-kube-api-access-2clgr\") pod \"crc-debug-p55pn\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.393305 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0191d790-4f49-40d1-9eb8-e8b7e6620328-host\") pod \"crc-debug-p55pn\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.393425 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0191d790-4f49-40d1-9eb8-e8b7e6620328-host\") pod \"crc-debug-p55pn\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.416640 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2clgr\" (UniqueName: \"kubernetes.io/projected/0191d790-4f49-40d1-9eb8-e8b7e6620328-kube-api-access-2clgr\") pod \"crc-debug-p55pn\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: I0930 01:16:33.456976 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:16:33 crc kubenswrapper[4922]: W0930 01:16:33.529938 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0191d790_4f49_40d1_9eb8_e8b7e6620328.slice/crio-914b63e7f41f75b2fafbc98770216a70c404b4138d3fb6632815259baec4880a WatchSource:0}: Error finding container 914b63e7f41f75b2fafbc98770216a70c404b4138d3fb6632815259baec4880a: Status 404 returned error can't find the container with id 914b63e7f41f75b2fafbc98770216a70c404b4138d3fb6632815259baec4880a Sep 30 01:16:34 crc kubenswrapper[4922]: I0930 01:16:34.531625 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" event={"ID":"0191d790-4f49-40d1-9eb8-e8b7e6620328","Type":"ContainerStarted","Data":"914b63e7f41f75b2fafbc98770216a70c404b4138d3fb6632815259baec4880a"} Sep 30 01:16:44 crc kubenswrapper[4922]: I0930 01:16:44.634553 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" event={"ID":"0191d790-4f49-40d1-9eb8-e8b7e6620328","Type":"ContainerStarted","Data":"e16ac49050ae218724ec3e3345999e367c0015f595a6bf7643a0ed6c4b3432ec"} Sep 30 01:17:28 crc kubenswrapper[4922]: I0930 01:17:28.917697 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:17:28 crc kubenswrapper[4922]: I0930 01:17:28.918282 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:17:58 crc kubenswrapper[4922]: I0930 01:17:58.912454 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:17:58 crc kubenswrapper[4922]: I0930 01:17:58.912906 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:18:01 crc kubenswrapper[4922]: I0930 01:18:01.405111 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_6931ec30-85ca-4bdc-824d-4c2a72fa36f8/init-config-reloader/0.log" Sep 30 01:18:01 crc kubenswrapper[4922]: I0930 01:18:01.542068 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_6931ec30-85ca-4bdc-824d-4c2a72fa36f8/init-config-reloader/0.log" Sep 30 01:18:01 crc kubenswrapper[4922]: I0930 01:18:01.629952 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_6931ec30-85ca-4bdc-824d-4c2a72fa36f8/alertmanager/0.log" Sep 30 01:18:01 crc kubenswrapper[4922]: I0930 01:18:01.765297 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_alertmanager-metric-storage-0_6931ec30-85ca-4bdc-824d-4c2a72fa36f8/config-reloader/0.log" Sep 30 01:18:01 crc kubenswrapper[4922]: I0930 01:18:01.945857 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d77e021f-7f43-46d5-9458-6d96907107fa/aodh-api/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.017586 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d77e021f-7f43-46d5-9458-6d96907107fa/aodh-evaluator/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.095423 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d77e021f-7f43-46d5-9458-6d96907107fa/aodh-listener/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.185497 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_d77e021f-7f43-46d5-9458-6d96907107fa/aodh-notifier/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.363678 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-799c46f664-rfhxd_3e4d84e3-b47d-4b20-9ce7-c8ca1b439159/barbican-api/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.564914 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-799c46f664-rfhxd_3e4d84e3-b47d-4b20-9ce7-c8ca1b439159/barbican-api-log/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.724013 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d76b6d974-tr884_8332b0f8-9c00-4c2d-8189-3145bcf70023/barbican-keystone-listener/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.797430 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d76b6d974-tr884_8332b0f8-9c00-4c2d-8189-3145bcf70023/barbican-keystone-listener-log/0.log" Sep 30 01:18:02 crc kubenswrapper[4922]: I0930 01:18:02.987988 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-55458695f-7vzm8_f83b7812-767b-4937-b1d1-2349e58b0ebe/barbican-worker/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.070416 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-55458695f-7vzm8_f83b7812-767b-4937-b1d1-2349e58b0ebe/barbican-worker-log/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.212431 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-gjr5h_3d055d48-2bce-49dd-948e-03f2a6e95282/bootstrap-openstack-openstack-cell1/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.358733 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ce31b701-95bd-47f4-a6aa-30209b38da1d/ceilometer-central-agent/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.373931 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ce31b701-95bd-47f4-a6aa-30209b38da1d/ceilometer-notification-agent/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.470646 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ce31b701-95bd-47f4-a6aa-30209b38da1d/proxy-httpd/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.509958 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ce31b701-95bd-47f4-a6aa-30209b38da1d/sg-core/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.662704 4922 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-2sm7q_92b1643b-fc8f-4422-bd01-01ec4dcfa718/ceph-client-openstack-openstack-cell1/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.872781 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_290f521e-93e4-412d-9724-40f850a4702a/cinder-api-log/0.log" Sep 30 01:18:03 crc kubenswrapper[4922]: I0930 01:18:03.880142 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_290f521e-93e4-412d-9724-40f850a4702a/cinder-api/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.079095 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_32ccf717-6bb2-43ce-9fdb-df41eb98bc8f/probe/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.189186 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_32ccf717-6bb2-43ce-9fdb-df41eb98bc8f/cinder-backup/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.278736 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6b33a9fd-6cb6-408b-83c8-79605b21f4c0/cinder-scheduler/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.407778 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_6b33a9fd-6cb6-408b-83c8-79605b21f4c0/probe/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.498600 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_df10883e-41b9-46f3-a960-005cba2e6c29/cinder-volume/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.634752 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_df10883e-41b9-46f3-a960-005cba2e6c29/probe/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.701189 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-5c2qn_99cb0060-861c-4fb7-9414-fe4575595fe7/configure-network-openstack-openstack-cell1/0.log" Sep 30 01:18:04 crc kubenswrapper[4922]: I0930 01:18:04.855670 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-6cgwb_8de83491-9e2b-415e-b765-ef041d9172f1/configure-os-openstack-openstack-cell1/0.log" Sep 30 01:18:05 crc kubenswrapper[4922]: I0930 01:18:05.036350 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79889664b7-xq248_5929b9d1-6b81-44e2-8ecd-9bda69d61400/init/0.log" Sep 30 01:18:05 crc kubenswrapper[4922]: I0930 01:18:05.960678 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79889664b7-xq248_5929b9d1-6b81-44e2-8ecd-9bda69d61400/init/0.log" Sep 30 01:18:06 crc kubenswrapper[4922]: I0930 01:18:06.003530 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-79889664b7-xq248_5929b9d1-6b81-44e2-8ecd-9bda69d61400/dnsmasq-dns/0.log" Sep 30 01:18:06 crc kubenswrapper[4922]: I0930 01:18:06.129868 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-mgrb9_21204209-381f-4ebb-ae8a-70bb06b43690/download-cache-openstack-openstack-cell1/0.log" Sep 30 01:18:06 crc kubenswrapper[4922]: I0930 01:18:06.224092 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-db-purge-29319841-nx6vg_bfa2ec61-bb7e-4444-bd84-9a9a41ba1340/glance-dbpurge/0.log" Sep 30 01:18:06 crc 
kubenswrapper[4922]: I0930 01:18:06.423526 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ba3fa718-9def-4884-a31f-0fb295b35c53/glance-httpd/0.log" Sep 30 01:18:06 crc kubenswrapper[4922]: I0930 01:18:06.468235 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_ba3fa718-9def-4884-a31f-0fb295b35c53/glance-log/0.log" Sep 30 01:18:06 crc kubenswrapper[4922]: I0930 01:18:06.641196 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f9f3333d-2d8b-4d71-8b56-4685c20b0d82/glance-httpd/0.log" Sep 30 01:18:06 crc kubenswrapper[4922]: I0930 01:18:06.659247 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_f9f3333d-2d8b-4d71-8b56-4685c20b0d82/glance-log/0.log" Sep 30 01:18:06 crc kubenswrapper[4922]: I0930 01:18:06.900578 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-776d95b874-qp56h_fc184a80-6988-4fa7-9ca7-b58db26d4fe3/heat-api/0.log" Sep 30 01:18:08 crc kubenswrapper[4922]: I0930 01:18:08.082436 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-5d4f95dff7-7mnm2_847d3ab6-ffef-437b-b3d3-9a073fba0deb/heat-engine/0.log" Sep 30 01:18:08 crc kubenswrapper[4922]: I0930 01:18:08.108857 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-9cc9d7476-w7smm_8f8a75ca-eefb-4908-88db-d35951495a62/heat-cfnapi/0.log" Sep 30 01:18:08 crc kubenswrapper[4922]: I0930 01:18:08.372067 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-648ffd58c7-rcgrg_6c47bee4-1616-4b27-9980-79628af51f6c/horizon/0.log" Sep 30 01:18:08 crc kubenswrapper[4922]: I0930 01:18:08.444383 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-648ffd58c7-rcgrg_6c47bee4-1616-4b27-9980-79628af51f6c/horizon-log/0.log" Sep 30 01:18:08 crc kubenswrapper[4922]: I0930 01:18:08.613627 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-pwkzf_94d57e1e-85f4-4e65-9ce3-1fe527634f3a/install-certs-openstack-openstack-cell1/0.log" Sep 30 01:18:08 crc kubenswrapper[4922]: I0930 01:18:08.804620 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-f26v6_8c975a75-0ee9-4ec6-8318-ced1a887e9f9/install-os-openstack-openstack-cell1/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.047422 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6c4c7b57f7-q89s8_f66e8e06-9d1b-4a9e-88e7-7f83b5161faa/keystone-api/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.068921 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319841-ptdh5_b4a4d0c7-0816-4323-938d-ddca4803d4c2/keystone-cron/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.239215 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319901-ddsjv_a465ff5e-6650-4b99-b0b7-d6986c555992/keystone-cron/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.414119 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_f18060ca-aa4f-4b42-883b-f238c4784e37/kube-state-metrics/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.520747 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-ptd8n_5935a421-fbf8-44a7-b65c-fa9bfa84124d/libvirt-openstack-openstack-cell1/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.760598 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea/manila-api-log/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.841723 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_5bd9f38e-5f69-4eb7-b21c-ff6f9dc296ea/manila-api/0.log" Sep 30 01:18:09 crc kubenswrapper[4922]: I0930 01:18:09.987879 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff/manila-scheduler/0.log" Sep 30 01:18:10 crc kubenswrapper[4922]: I0930 01:18:10.024630 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_d8310f30-4eba-4cfd-ba8d-7cf44aaf9bff/probe/0.log" Sep 30 01:18:10 crc kubenswrapper[4922]: I0930 01:18:10.178841 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_223e6bbc-9cab-45dc-9975-bc2c3d87cd61/manila-share/0.log" Sep 30 01:18:10 crc kubenswrapper[4922]: I0930 01:18:10.227096 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_223e6bbc-9cab-45dc-9975-bc2c3d87cd61/probe/0.log" Sep 30 01:18:10 crc kubenswrapper[4922]: I0930 01:18:10.708008 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-c95485957-cs59k_45f42243-1e24-462d-a7a4-5f4ce2ae749d/neutron-httpd/0.log" Sep 30 01:18:10 crc kubenswrapper[4922]: I0930 01:18:10.714520 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-c95485957-cs59k_45f42243-1e24-462d-a7a4-5f4ce2ae749d/neutron-api/0.log" Sep 30 01:18:10 crc kubenswrapper[4922]: I0930 01:18:10.982573 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-9tk7v_d7bce9bc-e602-49c9-ba0c-ae70de9d1d8a/neutron-dhcp-openstack-openstack-cell1/0.log" Sep 30 01:18:11 crc kubenswrapper[4922]: I0930 01:18:11.258083 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-4mf5z_1ecf144b-092f-40a8-a132-f212de350a0e/neutron-metadata-openstack-openstack-cell1/0.log" Sep 30 01:18:11 crc kubenswrapper[4922]: I0930 01:18:11.527222 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-8vwvj_8b134d4f-b9e7-4a35-9214-ef18899dba9a/neutron-sriov-openstack-openstack-cell1/0.log" Sep 30 01:18:12 crc kubenswrapper[4922]: I0930 01:18:12.046234 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_51d744d8-84a2-4a45-8043-8bf0594dde75/nova-api-api/0.log" Sep 30 01:18:12 crc kubenswrapper[4922]: I0930 01:18:12.193591 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_51d744d8-84a2-4a45-8043-8bf0594dde75/nova-api-log/0.log" Sep 30 01:18:12 crc kubenswrapper[4922]: I0930 01:18:12.502486 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_263ec1aa-f475-4b34-950d-c93301af9645/nova-cell0-conductor-conductor/0.log" Sep 30 01:18:12 crc kubenswrapper[4922]: I0930 01:18:12.823881 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_3eeae82c-d551-41d2-88c9-35a7dda0ac0a/nova-cell1-conductor-conductor/0.log" Sep 30 01:18:13 crc 
kubenswrapper[4922]: I0930 01:18:13.044810 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_cdd5b23e-56a0-4711-bd79-061dc4b72cb3/memcached/0.log" Sep 30 01:18:13 crc kubenswrapper[4922]: I0930 01:18:13.113244 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_87a8e261-f388-43d6-b0c3-70694d68aa54/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 01:18:13 crc kubenswrapper[4922]: I0930 01:18:13.417505 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellxf9lv_22c6a55f-61a1-4731-b0d7-2864a91aa8ec/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Sep 30 01:18:13 crc kubenswrapper[4922]: I0930 01:18:13.656770 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-fnhsm_71bdff05-728b-4401-be44-30fa83148d22/nova-cell1-openstack-openstack-cell1/0.log" Sep 30 01:18:13 crc kubenswrapper[4922]: I0930 01:18:13.809150 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d7c3399b-495f-48e0-aaea-eed2883b5feb/nova-metadata-log/0.log" Sep 30 01:18:13 crc kubenswrapper[4922]: I0930 01:18:13.926843 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d7c3399b-495f-48e0-aaea-eed2883b5feb/nova-metadata-metadata/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.079380 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_b92c76a6-19db-4f9f-9d67-2fcaeb7f5a9c/nova-scheduler-scheduler/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.140740 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-795867f848-rw9ph_4378081b-61ce-4cb4-8363-efb6e00cfd5b/init/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.332339 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-795867f848-rw9ph_4378081b-61ce-4cb4-8363-efb6e00cfd5b/init/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.372673 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-795867f848-rw9ph_4378081b-61ce-4cb4-8363-efb6e00cfd5b/octavia-api-provider-agent/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.542920 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-795867f848-rw9ph_4378081b-61ce-4cb4-8363-efb6e00cfd5b/octavia-api/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.573584 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-b4pfg_7e8895c6-5b08-47d8-9b31-941960476555/init/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.825903 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-b4pfg_7e8895c6-5b08-47d8-9b31-941960476555/octavia-healthmanager/0.log" Sep 30 01:18:14 crc kubenswrapper[4922]: I0930 01:18:14.852827 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-b4pfg_7e8895c6-5b08-47d8-9b31-941960476555/init/0.log" Sep 30 01:18:15 crc kubenswrapper[4922]: I0930 01:18:15.006150 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-nznck_632a4144-06fe-4caf-8063-7314dfb2b64d/init/0.log" Sep 30 01:18:15 crc kubenswrapper[4922]: I0930 01:18:15.862498 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_octavia-housekeeping-nznck_632a4144-06fe-4caf-8063-7314dfb2b64d/init/0.log" Sep 30 01:18:15 crc kubenswrapper[4922]: I0930 01:18:15.883574 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-nznck_632a4144-06fe-4caf-8063-7314dfb2b64d/octavia-housekeeping/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.065378 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-9qjl9_cb3496a2-59f9-4ed5-bef5-7e1e13058f7d/init/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.166674 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-9qjl9_cb3496a2-59f9-4ed5-bef5-7e1e13058f7d/octavia-amphora-httpd/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.212464 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-59f8cff499-9qjl9_cb3496a2-59f9-4ed5-bef5-7e1e13058f7d/init/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.364281 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-56zpk_649e7de2-5622-4c10-81c4-7a600e720f94/init/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.559469 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-56zpk_649e7de2-5622-4c10-81c4-7a600e720f94/init/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.570331 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-56zpk_649e7de2-5622-4c10-81c4-7a600e720f94/octavia-rsyslog/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.726375 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-fvl7m_f0e2d4bb-07a0-4aae-b471-c8fdeb214d88/init/0.log" Sep 30 01:18:16 crc kubenswrapper[4922]: I0930 01:18:16.947427 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-fvl7m_f0e2d4bb-07a0-4aae-b471-c8fdeb214d88/init/0.log" Sep 30 01:18:17 crc kubenswrapper[4922]: I0930 01:18:17.336697 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-fvl7m_f0e2d4bb-07a0-4aae-b471-c8fdeb214d88/octavia-worker/0.log" Sep 30 01:18:17 crc kubenswrapper[4922]: I0930 01:18:17.637027 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_77609099-dbc7-4148-b163-6013051afaba/mysql-bootstrap/0.log" Sep 30 01:18:17 crc kubenswrapper[4922]: I0930 01:18:17.768042 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_77609099-dbc7-4148-b163-6013051afaba/mysql-bootstrap/0.log" Sep 30 01:18:17 crc kubenswrapper[4922]: I0930 01:18:17.782270 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_77609099-dbc7-4148-b163-6013051afaba/galera/0.log" Sep 30 01:18:17 crc kubenswrapper[4922]: I0930 01:18:17.980752 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0273df65-3375-4e15-b8ca-0279dc20353f/mysql-bootstrap/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.143713 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_0273df65-3375-4e15-b8ca-0279dc20353f/mysql-bootstrap/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.166281 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_0273df65-3375-4e15-b8ca-0279dc20353f/galera/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.335210 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_65bab339-0cd2-43f1-8387-dedc132cb9f3/openstackclient/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.451498 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-xqs6b_f122e376-dde7-4cd7-a4c6-f67c6d2d5fd5/openstack-network-exporter/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.645855 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h2qhf_f446bafd-364e-4088-a43d-9b4f21910312/ovsdb-server-init/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.771964 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h2qhf_f446bafd-364e-4088-a43d-9b4f21910312/ovsdb-server-init/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.777409 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h2qhf_f446bafd-364e-4088-a43d-9b4f21910312/ovs-vswitchd/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.812440 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h2qhf_f446bafd-364e-4088-a43d-9b4f21910312/ovsdb-server/0.log" Sep 30 01:18:18 crc kubenswrapper[4922]: I0930 01:18:18.959742 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-wcjrw_0137d8ea-c9d8-442f-9a87-c827e0ad241a/ovn-controller/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.121347 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_966a79f9-9523-4d1e-a78b-af8bc4b8e51b/openstack-network-exporter/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.230959 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_966a79f9-9523-4d1e-a78b-af8bc4b8e51b/ovn-northd/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.375259 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-f4zxv_41047bf4-d616-4327-b2b7-edd10324c5f4/ovn-openstack-openstack-cell1/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.490489 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_51103d86-5353-4cb5-97ab-c287700eb9ec/openstack-network-exporter/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.586253 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_51103d86-5353-4cb5-97ab-c287700eb9ec/ovsdbserver-nb/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.649293 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_9069ed46-83fb-4a1e-9422-c87634b81112/openstack-network-exporter/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.777066 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_9069ed46-83fb-4a1e-9422-c87634b81112/ovsdbserver-nb/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.913527 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_4f278e0d-047d-4387-8910-64bb296a8565/openstack-network-exporter/0.log" Sep 30 01:18:19 crc kubenswrapper[4922]: I0930 01:18:19.972293 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-2_4f278e0d-047d-4387-8910-64bb296a8565/ovsdbserver-nb/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.104498 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2961879a-d786-4763-8353-554b884a741d/openstack-network-exporter/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.148221 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_2961879a-d786-4763-8353-554b884a741d/ovsdbserver-sb/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.291110 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_6c3a574b-04b2-4672-9810-581f6ad101d5/openstack-network-exporter/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.362873 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_6c3a574b-04b2-4672-9810-581f6ad101d5/ovsdbserver-sb/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.508335 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_98483887-cf9b-4246-ac12-4d38dae5acd0/openstack-network-exporter/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.601014 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_98483887-cf9b-4246-ac12-4d38dae5acd0/ovsdbserver-sb/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.811024 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84cc4f499b-klhmc_81c37d6d-b586-4719-9030-c718360c46a1/placement-api/0.log" Sep 30 01:18:20 crc kubenswrapper[4922]: I0930 01:18:20.846374 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-84cc4f499b-klhmc_81c37d6d-b586-4719-9030-c718360c46a1/placement-log/0.log" Sep 30 01:18:21 crc kubenswrapper[4922]: I0930 01:18:21.009001 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cnqvjj_8471899a-f22f-4f56-b48d-8644475f56ec/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log" Sep 30 01:18:21 crc kubenswrapper[4922]: I0930 01:18:21.150623 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_fd2b9bb1-3eaf-4999-b558-f4778bae160b/init-config-reloader/0.log" Sep 30 01:18:21 crc kubenswrapper[4922]: I0930 01:18:21.395733 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_fd2b9bb1-3eaf-4999-b558-f4778bae160b/init-config-reloader/0.log" Sep 30 01:18:21 crc kubenswrapper[4922]: I0930 01:18:21.406512 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_fd2b9bb1-3eaf-4999-b558-f4778bae160b/config-reloader/0.log" Sep 30 01:18:21 crc kubenswrapper[4922]: I0930 01:18:21.444665 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_fd2b9bb1-3eaf-4999-b558-f4778bae160b/prometheus/0.log" Sep 30 01:18:21 crc kubenswrapper[4922]: I0930 01:18:21.579764 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_fd2b9bb1-3eaf-4999-b558-f4778bae160b/thanos-sidecar/0.log" Sep 30 01:18:21 crc kubenswrapper[4922]: I0930 01:18:21.666046 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_098321c4-3c5e-485c-bc49-fe5f5bc63b6e/setup-container/0.log" Sep 30 01:18:21 crc 
kubenswrapper[4922]: I0930 01:18:21.786654 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_098321c4-3c5e-485c-bc49-fe5f5bc63b6e/setup-container/0.log" Sep 30 01:18:22 crc kubenswrapper[4922]: I0930 01:18:22.074232 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_098321c4-3c5e-485c-bc49-fe5f5bc63b6e/rabbitmq/0.log" Sep 30 01:18:22 crc kubenswrapper[4922]: I0930 01:18:22.231344 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_611a3899-4697-458f-8e48-6516d4b9e899/setup-container/0.log" Sep 30 01:18:22 crc kubenswrapper[4922]: I0930 01:18:22.556473 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_611a3899-4697-458f-8e48-6516d4b9e899/setup-container/0.log" Sep 30 01:18:22 crc kubenswrapper[4922]: I0930 01:18:22.783463 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-vwq4t_af5fe853-df4f-4d18-b1e0-0a476eef7e4a/reboot-os-openstack-openstack-cell1/0.log" Sep 30 01:18:23 crc kubenswrapper[4922]: I0930 01:18:23.008479 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-g4f48_8cfc3265-59ce-4e4c-b01e-a2091edeacc2/run-os-openstack-openstack-cell1/0.log" Sep 30 01:18:23 crc kubenswrapper[4922]: I0930 01:18:23.221548 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-xc6qw_c5c82e19-0d12-49b8-9578-757544eb77e6/ssh-known-hosts-openstack/0.log" Sep 30 01:18:23 crc kubenswrapper[4922]: I0930 01:18:23.529821 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-nl9hf_8e76de18-d8f3-45ae-898e-f7262477074d/telemetry-openstack-openstack-cell1/0.log" Sep 30 01:18:23 crc kubenswrapper[4922]: I0930 01:18:23.830890 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-6hm45_8caf595c-95ea-4701-b5e2-97e970cdf01b/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log" Sep 30 01:18:24 crc kubenswrapper[4922]: I0930 01:18:24.113123 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-sdvr7_4798d5ab-15f7-475c-bf7b-8b9b09222f96/validate-network-openstack-openstack-cell1/0.log" Sep 30 01:18:27 crc kubenswrapper[4922]: I0930 01:18:27.637405 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_611a3899-4697-458f-8e48-6516d4b9e899/rabbitmq/0.log" Sep 30 01:18:28 crc kubenswrapper[4922]: I0930 01:18:28.913205 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:18:28 crc kubenswrapper[4922]: I0930 01:18:28.913529 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:18:28 crc kubenswrapper[4922]: I0930 01:18:28.913583 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 01:18:28 crc kubenswrapper[4922]: I0930 01:18:28.914517 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:18:28 crc kubenswrapper[4922]: I0930 01:18:28.914593 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" gracePeriod=600 Sep 30 01:18:29 crc kubenswrapper[4922]: E0930 01:18:29.040540 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:18:29 crc kubenswrapper[4922]: I0930 01:18:29.778672 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" exitCode=0 Sep 30 01:18:29 crc kubenswrapper[4922]: I0930 01:18:29.778749 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff"} Sep 30 01:18:29 crc kubenswrapper[4922]: I0930 01:18:29.778961 4922 scope.go:117] "RemoveContainer" containerID="4cad47b00f52f9d838669b5b525326373baa27775879595964bddcea5ac9aba2" Sep 30 01:18:29 crc kubenswrapper[4922]: I0930 01:18:29.779604 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:18:29 crc kubenswrapper[4922]: E0930 01:18:29.779867 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:18:29 crc kubenswrapper[4922]: I0930 01:18:29.793202 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" podStartSLOduration=106.408441887 podStartE2EDuration="1m56.793191775s" podCreationTimestamp="2025-09-30 01:16:33 +0000 UTC" firstStartedPulling="2025-09-30 01:16:33.533033714 +0000 UTC m=+10197.843322537" lastFinishedPulling="2025-09-30 01:16:43.917783612 +0000 UTC m=+10208.228072425" observedRunningTime="2025-09-30 01:16:44.659680776 +0000 UTC m=+10208.969969609" watchObservedRunningTime="2025-09-30 01:18:29.793191775 +0000 UTC m=+10314.103480588" Sep 30 01:18:41 crc kubenswrapper[4922]: I0930 01:18:41.422715 4922 scope.go:117] 
"RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:18:41 crc kubenswrapper[4922]: E0930 01:18:41.423755 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:18:52 crc kubenswrapper[4922]: I0930 01:18:52.052016 4922 generic.go:334] "Generic (PLEG): container finished" podID="0191d790-4f49-40d1-9eb8-e8b7e6620328" containerID="e16ac49050ae218724ec3e3345999e367c0015f595a6bf7643a0ed6c4b3432ec" exitCode=0 Sep 30 01:18:52 crc kubenswrapper[4922]: I0930 01:18:52.052146 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" event={"ID":"0191d790-4f49-40d1-9eb8-e8b7e6620328","Type":"ContainerDied","Data":"e16ac49050ae218724ec3e3345999e367c0015f595a6bf7643a0ed6c4b3432ec"} Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.206548 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.234954 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2clgr\" (UniqueName: \"kubernetes.io/projected/0191d790-4f49-40d1-9eb8-e8b7e6620328-kube-api-access-2clgr\") pod \"0191d790-4f49-40d1-9eb8-e8b7e6620328\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.235158 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0191d790-4f49-40d1-9eb8-e8b7e6620328-host\") pod \"0191d790-4f49-40d1-9eb8-e8b7e6620328\" (UID: \"0191d790-4f49-40d1-9eb8-e8b7e6620328\") " Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.235279 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0191d790-4f49-40d1-9eb8-e8b7e6620328-host" (OuterVolumeSpecName: "host") pod "0191d790-4f49-40d1-9eb8-e8b7e6620328" (UID: "0191d790-4f49-40d1-9eb8-e8b7e6620328"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.235741 4922 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0191d790-4f49-40d1-9eb8-e8b7e6620328-host\") on node \"crc\" DevicePath \"\"" Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.242269 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0191d790-4f49-40d1-9eb8-e8b7e6620328-kube-api-access-2clgr" (OuterVolumeSpecName: "kube-api-access-2clgr") pod "0191d790-4f49-40d1-9eb8-e8b7e6620328" (UID: "0191d790-4f49-40d1-9eb8-e8b7e6620328"). InnerVolumeSpecName "kube-api-access-2clgr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.256709 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-p55pn"] Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.268679 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-p55pn"] Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.337773 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2clgr\" (UniqueName: \"kubernetes.io/projected/0191d790-4f49-40d1-9eb8-e8b7e6620328-kube-api-access-2clgr\") on node \"crc\" DevicePath \"\"" Sep 30 01:18:53 crc kubenswrapper[4922]: I0930 01:18:53.423042 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:18:53 crc kubenswrapper[4922]: E0930 01:18:53.423608 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.081061 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="914b63e7f41f75b2fafbc98770216a70c404b4138d3fb6632815259baec4880a" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.081332 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-p55pn" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.436185 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0191d790-4f49-40d1-9eb8-e8b7e6620328" path="/var/lib/kubelet/pods/0191d790-4f49-40d1-9eb8-e8b7e6620328/volumes" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.487721 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-l7js2"] Sep 30 01:18:54 crc kubenswrapper[4922]: E0930 01:18:54.488233 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0191d790-4f49-40d1-9eb8-e8b7e6620328" containerName="container-00" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.488255 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="0191d790-4f49-40d1-9eb8-e8b7e6620328" containerName="container-00" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.488556 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="0191d790-4f49-40d1-9eb8-e8b7e6620328" containerName="container-00" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.489465 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.575071 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04cdb692-7307-464e-a881-1727b3bc03a5-host\") pod \"crc-debug-l7js2\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.575595 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zh92\" (UniqueName: \"kubernetes.io/projected/04cdb692-7307-464e-a881-1727b3bc03a5-kube-api-access-5zh92\") pod \"crc-debug-l7js2\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.677648 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zh92\" (UniqueName: \"kubernetes.io/projected/04cdb692-7307-464e-a881-1727b3bc03a5-kube-api-access-5zh92\") pod \"crc-debug-l7js2\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.677719 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04cdb692-7307-464e-a881-1727b3bc03a5-host\") pod \"crc-debug-l7js2\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.677908 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04cdb692-7307-464e-a881-1727b3bc03a5-host\") pod \"crc-debug-l7js2\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.706720 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zh92\" (UniqueName: \"kubernetes.io/projected/04cdb692-7307-464e-a881-1727b3bc03a5-kube-api-access-5zh92\") pod \"crc-debug-l7js2\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:54 crc kubenswrapper[4922]: I0930 01:18:54.812604 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:55 crc kubenswrapper[4922]: I0930 01:18:55.100485 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-l7js2" event={"ID":"04cdb692-7307-464e-a881-1727b3bc03a5","Type":"ContainerStarted","Data":"33859bdfb2c2b78320232c872933d3b0068332d1e0e28661672469de3520133f"} Sep 30 01:18:56 crc kubenswrapper[4922]: I0930 01:18:56.117796 4922 generic.go:334] "Generic (PLEG): container finished" podID="04cdb692-7307-464e-a881-1727b3bc03a5" containerID="d6f27b14601203ff171c664749c0003c154776364bac02d5b92a64dce1e86496" exitCode=0 Sep 30 01:18:56 crc kubenswrapper[4922]: I0930 01:18:56.117868 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-l7js2" event={"ID":"04cdb692-7307-464e-a881-1727b3bc03a5","Type":"ContainerDied","Data":"d6f27b14601203ff171c664749c0003c154776364bac02d5b92a64dce1e86496"} Sep 30 01:18:57 crc kubenswrapper[4922]: I0930 01:18:57.261732 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:18:57 crc kubenswrapper[4922]: I0930 01:18:57.342492 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04cdb692-7307-464e-a881-1727b3bc03a5-host\") pod \"04cdb692-7307-464e-a881-1727b3bc03a5\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " Sep 30 01:18:57 crc kubenswrapper[4922]: I0930 01:18:57.342595 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zh92\" (UniqueName: \"kubernetes.io/projected/04cdb692-7307-464e-a881-1727b3bc03a5-kube-api-access-5zh92\") pod \"04cdb692-7307-464e-a881-1727b3bc03a5\" (UID: \"04cdb692-7307-464e-a881-1727b3bc03a5\") " Sep 30 01:18:57 crc kubenswrapper[4922]: I0930 01:18:57.342653 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/04cdb692-7307-464e-a881-1727b3bc03a5-host" (OuterVolumeSpecName: "host") pod "04cdb692-7307-464e-a881-1727b3bc03a5" (UID: "04cdb692-7307-464e-a881-1727b3bc03a5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 01:18:57 crc kubenswrapper[4922]: I0930 01:18:57.343223 4922 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/04cdb692-7307-464e-a881-1727b3bc03a5-host\") on node \"crc\" DevicePath \"\"" Sep 30 01:18:57 crc kubenswrapper[4922]: I0930 01:18:57.364292 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04cdb692-7307-464e-a881-1727b3bc03a5-kube-api-access-5zh92" (OuterVolumeSpecName: "kube-api-access-5zh92") pod "04cdb692-7307-464e-a881-1727b3bc03a5" (UID: "04cdb692-7307-464e-a881-1727b3bc03a5"). InnerVolumeSpecName "kube-api-access-5zh92". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:18:57 crc kubenswrapper[4922]: I0930 01:18:57.445010 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zh92\" (UniqueName: \"kubernetes.io/projected/04cdb692-7307-464e-a881-1727b3bc03a5-kube-api-access-5zh92\") on node \"crc\" DevicePath \"\"" Sep 30 01:18:58 crc kubenswrapper[4922]: I0930 01:18:58.152645 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-l7js2" event={"ID":"04cdb692-7307-464e-a881-1727b3bc03a5","Type":"ContainerDied","Data":"33859bdfb2c2b78320232c872933d3b0068332d1e0e28661672469de3520133f"} Sep 30 01:18:58 crc kubenswrapper[4922]: I0930 01:18:58.153001 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33859bdfb2c2b78320232c872933d3b0068332d1e0e28661672469de3520133f" Sep 30 01:18:58 crc kubenswrapper[4922]: I0930 01:18:58.152779 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-l7js2" Sep 30 01:19:05 crc kubenswrapper[4922]: I0930 01:19:05.421721 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:19:05 crc kubenswrapper[4922]: E0930 01:19:05.422690 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:19:07 crc kubenswrapper[4922]: I0930 01:19:07.173278 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-l7js2"] Sep 30 01:19:07 crc kubenswrapper[4922]: I0930 01:19:07.181946 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-l7js2"] Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.400175 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-wmwkg"] Sep 30 01:19:08 crc kubenswrapper[4922]: E0930 01:19:08.401055 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04cdb692-7307-464e-a881-1727b3bc03a5" containerName="container-00" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.401074 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="04cdb692-7307-464e-a881-1727b3bc03a5" containerName="container-00" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.401362 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="04cdb692-7307-464e-a881-1727b3bc03a5" containerName="container-00" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.402496 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.439029 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04cdb692-7307-464e-a881-1727b3bc03a5" path="/var/lib/kubelet/pods/04cdb692-7307-464e-a881-1727b3bc03a5/volumes" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.559773 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fczrx\" (UniqueName: \"kubernetes.io/projected/338d4e7f-2091-4282-9e35-b7206ab0767d-kube-api-access-fczrx\") pod \"crc-debug-wmwkg\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.560024 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/338d4e7f-2091-4282-9e35-b7206ab0767d-host\") pod \"crc-debug-wmwkg\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.662110 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fczrx\" (UniqueName: \"kubernetes.io/projected/338d4e7f-2091-4282-9e35-b7206ab0767d-kube-api-access-fczrx\") pod \"crc-debug-wmwkg\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.662579 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/338d4e7f-2091-4282-9e35-b7206ab0767d-host\") pod \"crc-debug-wmwkg\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.662721 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/338d4e7f-2091-4282-9e35-b7206ab0767d-host\") pod \"crc-debug-wmwkg\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.692419 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fczrx\" (UniqueName: \"kubernetes.io/projected/338d4e7f-2091-4282-9e35-b7206ab0767d-kube-api-access-fczrx\") pod \"crc-debug-wmwkg\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:08 crc kubenswrapper[4922]: I0930 01:19:08.737076 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:09 crc kubenswrapper[4922]: I0930 01:19:09.278352 4922 generic.go:334] "Generic (PLEG): container finished" podID="338d4e7f-2091-4282-9e35-b7206ab0767d" containerID="ff6c76696a7d20e79f6369f912baa173add132215b5ce21c0818171c675ca050" exitCode=0 Sep 30 01:19:09 crc kubenswrapper[4922]: I0930 01:19:09.278464 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" event={"ID":"338d4e7f-2091-4282-9e35-b7206ab0767d","Type":"ContainerDied","Data":"ff6c76696a7d20e79f6369f912baa173add132215b5ce21c0818171c675ca050"} Sep 30 01:19:09 crc kubenswrapper[4922]: I0930 01:19:09.278976 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" event={"ID":"338d4e7f-2091-4282-9e35-b7206ab0767d","Type":"ContainerStarted","Data":"a6e4369acc43af0ec5f8d678920bf46967aa7061af2377d250295ff8901359b7"} Sep 30 01:19:09 crc kubenswrapper[4922]: I0930 01:19:09.343180 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-wmwkg"] Sep 30 01:19:09 crc kubenswrapper[4922]: I0930 01:19:09.360940 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mh2ts/crc-debug-wmwkg"] Sep 30 01:19:10 crc kubenswrapper[4922]: I0930 01:19:10.422128 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:10 crc kubenswrapper[4922]: I0930 01:19:10.622532 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/338d4e7f-2091-4282-9e35-b7206ab0767d-host\") pod \"338d4e7f-2091-4282-9e35-b7206ab0767d\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " Sep 30 01:19:10 crc kubenswrapper[4922]: I0930 01:19:10.622718 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/338d4e7f-2091-4282-9e35-b7206ab0767d-host" (OuterVolumeSpecName: "host") pod "338d4e7f-2091-4282-9e35-b7206ab0767d" (UID: "338d4e7f-2091-4282-9e35-b7206ab0767d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 01:19:10 crc kubenswrapper[4922]: I0930 01:19:10.623627 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fczrx\" (UniqueName: \"kubernetes.io/projected/338d4e7f-2091-4282-9e35-b7206ab0767d-kube-api-access-fczrx\") pod \"338d4e7f-2091-4282-9e35-b7206ab0767d\" (UID: \"338d4e7f-2091-4282-9e35-b7206ab0767d\") " Sep 30 01:19:10 crc kubenswrapper[4922]: I0930 01:19:10.625422 4922 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/338d4e7f-2091-4282-9e35-b7206ab0767d-host\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:10 crc kubenswrapper[4922]: I0930 01:19:10.633092 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/338d4e7f-2091-4282-9e35-b7206ab0767d-kube-api-access-fczrx" (OuterVolumeSpecName: "kube-api-access-fczrx") pod "338d4e7f-2091-4282-9e35-b7206ab0767d" (UID: "338d4e7f-2091-4282-9e35-b7206ab0767d"). InnerVolumeSpecName "kube-api-access-fczrx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:19:10 crc kubenswrapper[4922]: I0930 01:19:10.727533 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fczrx\" (UniqueName: \"kubernetes.io/projected/338d4e7f-2091-4282-9e35-b7206ab0767d-kube-api-access-fczrx\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:11 crc kubenswrapper[4922]: I0930 01:19:11.306620 4922 scope.go:117] "RemoveContainer" containerID="ff6c76696a7d20e79f6369f912baa173add132215b5ce21c0818171c675ca050" Sep 30 01:19:11 crc kubenswrapper[4922]: I0930 01:19:11.306771 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mh2ts/crc-debug-wmwkg" Sep 30 01:19:12 crc kubenswrapper[4922]: I0930 01:19:12.441478 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="338d4e7f-2091-4282-9e35-b7206ab0767d" path="/var/lib/kubelet/pods/338d4e7f-2091-4282-9e35-b7206ab0767d/volumes" Sep 30 01:19:19 crc kubenswrapper[4922]: I0930 01:19:19.423567 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:19:19 crc kubenswrapper[4922]: E0930 01:19:19.424661 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:19:23 crc kubenswrapper[4922]: I0930 01:19:23.908679 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wpx2v"] Sep 30 01:19:23 crc kubenswrapper[4922]: E0930 01:19:23.910299 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="338d4e7f-2091-4282-9e35-b7206ab0767d" containerName="container-00" Sep 30 01:19:23 crc kubenswrapper[4922]: I0930 01:19:23.910320 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="338d4e7f-2091-4282-9e35-b7206ab0767d" containerName="container-00" Sep 30 01:19:23 crc kubenswrapper[4922]: I0930 01:19:23.910624 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="338d4e7f-2091-4282-9e35-b7206ab0767d" containerName="container-00" Sep 30 01:19:23 crc kubenswrapper[4922]: I0930 01:19:23.913247 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:23 crc kubenswrapper[4922]: I0930 01:19:23.922118 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wpx2v"] Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.076774 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-utilities\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.077263 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-catalog-content\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.077500 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrfr7\" (UniqueName: \"kubernetes.io/projected/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-kube-api-access-lrfr7\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.179499 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-catalog-content\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.179567 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrfr7\" (UniqueName: \"kubernetes.io/projected/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-kube-api-access-lrfr7\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.179612 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-utilities\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.180122 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-utilities\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.180540 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-catalog-content\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.349673 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-lrfr7\" (UniqueName: \"kubernetes.io/projected/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-kube-api-access-lrfr7\") pod \"redhat-operators-wpx2v\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:24 crc kubenswrapper[4922]: I0930 01:19:24.544974 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:25 crc kubenswrapper[4922]: I0930 01:19:25.348422 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wpx2v"] Sep 30 01:19:25 crc kubenswrapper[4922]: I0930 01:19:25.506906 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpx2v" event={"ID":"707cbf49-1dba-43b4-a13d-efd0f4df6ec7","Type":"ContainerStarted","Data":"4ce49bbb978a60dd82c78366f92e08d3a575f4cae4dcaafc94476bc26fffd3f8"} Sep 30 01:19:26 crc kubenswrapper[4922]: I0930 01:19:26.524752 4922 generic.go:334] "Generic (PLEG): container finished" podID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerID="62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d" exitCode=0 Sep 30 01:19:26 crc kubenswrapper[4922]: I0930 01:19:26.525024 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpx2v" event={"ID":"707cbf49-1dba-43b4-a13d-efd0f4df6ec7","Type":"ContainerDied","Data":"62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d"} Sep 30 01:19:28 crc kubenswrapper[4922]: I0930 01:19:28.556856 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpx2v" event={"ID":"707cbf49-1dba-43b4-a13d-efd0f4df6ec7","Type":"ContainerStarted","Data":"90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26"} Sep 30 01:19:30 crc kubenswrapper[4922]: I0930 01:19:30.587823 4922 generic.go:334] "Generic (PLEG): container finished" podID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerID="90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26" exitCode=0 Sep 30 01:19:30 crc kubenswrapper[4922]: I0930 01:19:30.587908 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpx2v" event={"ID":"707cbf49-1dba-43b4-a13d-efd0f4df6ec7","Type":"ContainerDied","Data":"90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26"} Sep 30 01:19:31 crc kubenswrapper[4922]: I0930 01:19:31.606749 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpx2v" event={"ID":"707cbf49-1dba-43b4-a13d-efd0f4df6ec7","Type":"ContainerStarted","Data":"cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789"} Sep 30 01:19:31 crc kubenswrapper[4922]: I0930 01:19:31.648254 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wpx2v" podStartSLOduration=4.164560632 podStartE2EDuration="8.64822697s" podCreationTimestamp="2025-09-30 01:19:23 +0000 UTC" firstStartedPulling="2025-09-30 01:19:26.527872353 +0000 UTC m=+10370.838161196" lastFinishedPulling="2025-09-30 01:19:31.011538711 +0000 UTC m=+10375.321827534" observedRunningTime="2025-09-30 01:19:31.637935675 +0000 UTC m=+10375.948224488" watchObservedRunningTime="2025-09-30 01:19:31.64822697 +0000 UTC m=+10375.958515813" Sep 30 01:19:33 crc kubenswrapper[4922]: I0930 01:19:33.425048 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 
01:19:33 crc kubenswrapper[4922]: E0930 01:19:33.426892 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:19:34 crc kubenswrapper[4922]: I0930 01:19:34.545742 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:34 crc kubenswrapper[4922]: I0930 01:19:34.546921 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:35 crc kubenswrapper[4922]: I0930 01:19:35.634352 4922 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wpx2v" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="registry-server" probeResult="failure" output=< Sep 30 01:19:35 crc kubenswrapper[4922]: timeout: failed to connect service ":50051" within 1s Sep 30 01:19:35 crc kubenswrapper[4922]: > Sep 30 01:19:44 crc kubenswrapper[4922]: I0930 01:19:44.642212 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:44 crc kubenswrapper[4922]: I0930 01:19:44.729481 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:44 crc kubenswrapper[4922]: I0930 01:19:44.889449 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wpx2v"] Sep 30 01:19:45 crc kubenswrapper[4922]: I0930 01:19:45.777444 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wpx2v" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="registry-server" containerID="cri-o://cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789" gracePeriod=2 Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.242290 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.269806 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrfr7\" (UniqueName: \"kubernetes.io/projected/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-kube-api-access-lrfr7\") pod \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.269901 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-catalog-content\") pod \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.270083 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-utilities\") pod \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\" (UID: \"707cbf49-1dba-43b4-a13d-efd0f4df6ec7\") " Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.275843 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-utilities" (OuterVolumeSpecName: "utilities") pod "707cbf49-1dba-43b4-a13d-efd0f4df6ec7" (UID: "707cbf49-1dba-43b4-a13d-efd0f4df6ec7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.288065 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-kube-api-access-lrfr7" (OuterVolumeSpecName: "kube-api-access-lrfr7") pod "707cbf49-1dba-43b4-a13d-efd0f4df6ec7" (UID: "707cbf49-1dba-43b4-a13d-efd0f4df6ec7"). InnerVolumeSpecName "kube-api-access-lrfr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.379542 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrfr7\" (UniqueName: \"kubernetes.io/projected/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-kube-api-access-lrfr7\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.379805 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.385948 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "707cbf49-1dba-43b4-a13d-efd0f4df6ec7" (UID: "707cbf49-1dba-43b4-a13d-efd0f4df6ec7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.422536 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:19:46 crc kubenswrapper[4922]: E0930 01:19:46.423135 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.483645 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/707cbf49-1dba-43b4-a13d-efd0f4df6ec7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.794694 4922 generic.go:334] "Generic (PLEG): container finished" podID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerID="cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789" exitCode=0 Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.794756 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpx2v" event={"ID":"707cbf49-1dba-43b4-a13d-efd0f4df6ec7","Type":"ContainerDied","Data":"cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789"} Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.794791 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpx2v" event={"ID":"707cbf49-1dba-43b4-a13d-efd0f4df6ec7","Type":"ContainerDied","Data":"4ce49bbb978a60dd82c78366f92e08d3a575f4cae4dcaafc94476bc26fffd3f8"} Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.794816 4922 scope.go:117] "RemoveContainer" containerID="cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.794870 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wpx2v" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.838328 4922 scope.go:117] "RemoveContainer" containerID="90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.847444 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wpx2v"] Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.862491 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wpx2v"] Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.873740 4922 scope.go:117] "RemoveContainer" containerID="62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.950784 4922 scope.go:117] "RemoveContainer" containerID="cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789" Sep 30 01:19:46 crc kubenswrapper[4922]: E0930 01:19:46.951199 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789\": container with ID starting with cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789 not found: ID does not exist" containerID="cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.951244 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789"} err="failed to get container status \"cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789\": rpc error: code = NotFound desc = could not find container \"cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789\": container with ID starting with cd777a2b0891cb7cef1c2277e63c01bcc14906f5e04cf1b7af073ceac8d15789 not found: ID does not exist" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.951272 4922 scope.go:117] "RemoveContainer" containerID="90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26" Sep 30 01:19:46 crc kubenswrapper[4922]: E0930 01:19:46.951587 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26\": container with ID starting with 90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26 not found: ID does not exist" containerID="90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.951632 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26"} err="failed to get container status \"90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26\": rpc error: code = NotFound desc = could not find container \"90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26\": container with ID starting with 90c46c5ad0b735c5d0d25bc48d04958c618b49ae93f7e74597cbb01c3606fb26 not found: ID does not exist" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.951659 4922 scope.go:117] "RemoveContainer" containerID="62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d" Sep 30 01:19:46 crc kubenswrapper[4922]: E0930 01:19:46.952066 4922 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d\": container with ID starting with 62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d not found: ID does not exist" containerID="62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d" Sep 30 01:19:46 crc kubenswrapper[4922]: I0930 01:19:46.952093 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d"} err="failed to get container status \"62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d\": rpc error: code = NotFound desc = could not find container \"62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d\": container with ID starting with 62e02287cd8ffbb71b3458e0c98358bbbc66c2018e268a74c5352781fe221a0d not found: ID does not exist" Sep 30 01:19:48 crc kubenswrapper[4922]: I0930 01:19:48.440836 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" path="/var/lib/kubelet/pods/707cbf49-1dba-43b4-a13d-efd0f4df6ec7/volumes" Sep 30 01:19:58 crc kubenswrapper[4922]: I0930 01:19:58.422873 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:19:58 crc kubenswrapper[4922]: E0930 01:19:58.424204 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:20:12 crc kubenswrapper[4922]: I0930 01:20:12.422364 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:20:12 crc kubenswrapper[4922]: E0930 01:20:12.423525 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.082994 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt_9845266c-41aa-45af-86ac-cc5cc4cd163e/util/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.328884 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt_9845266c-41aa-45af-86ac-cc5cc4cd163e/pull/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.341067 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt_9845266c-41aa-45af-86ac-cc5cc4cd163e/util/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.369920 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt_9845266c-41aa-45af-86ac-cc5cc4cd163e/pull/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.552679 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt_9845266c-41aa-45af-86ac-cc5cc4cd163e/util/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.572678 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt_9845266c-41aa-45af-86ac-cc5cc4cd163e/extract/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.587335 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2e14749f5cf842dc933fb703e8ceab343bb95ceed1595a157e65493605vfggt_9845266c-41aa-45af-86ac-cc5cc4cd163e/pull/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.820489 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-bfzxs_b7651f52-4ceb-4d53-b74a-dfb7da473f68/manager/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.838796 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-bfzxs_b7651f52-4ceb-4d53-b74a-dfb7da473f68/kube-rbac-proxy/0.log" Sep 30 01:20:15 crc kubenswrapper[4922]: I0930 01:20:15.869570 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-dtws2_7b4517a9-f6ca-4209-9c59-a862b207ee30/kube-rbac-proxy/0.log" Sep 30 01:20:16 crc kubenswrapper[4922]: I0930 01:20:16.043997 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-dtws2_7b4517a9-f6ca-4209-9c59-a862b207ee30/manager/0.log" Sep 30 01:20:16 crc kubenswrapper[4922]: I0930 01:20:16.048611 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-7b99q_10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89/kube-rbac-proxy/0.log" Sep 30 01:20:16 crc kubenswrapper[4922]: I0930 01:20:16.080435 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-7b99q_10fb24fc-c1ed-4e04-bc4e-a9b7d6b95c89/manager/0.log" Sep 30 01:20:16 crc kubenswrapper[4922]: I0930 01:20:16.246009 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-nvw97_7bb037db-f6bf-4a16-918f-153b149b9ab4/kube-rbac-proxy/0.log" Sep 30 01:20:16 crc kubenswrapper[4922]: I0930 01:20:16.304764 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-nvw97_7bb037db-f6bf-4a16-918f-153b149b9ab4/manager/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.064544 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-trqhs_44863fa1-d920-42fc-a5d2-197762fe8c37/kube-rbac-proxy/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.079239 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-tplbn_2fc57cef-6bbd-4925-82a9-0efb9622aa81/kube-rbac-proxy/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 
01:20:17.101531 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-trqhs_44863fa1-d920-42fc-a5d2-197762fe8c37/manager/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.278885 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-tplbn_2fc57cef-6bbd-4925-82a9-0efb9622aa81/manager/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.327206 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-7mtt9_a7fd9019-83a0-41a0-8380-fac36130cb3d/kube-rbac-proxy/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.466053 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-dpnvq_d6777dc8-0849-4744-bc01-7f790064dcfe/kube-rbac-proxy/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.572120 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-dpnvq_d6777dc8-0849-4744-bc01-7f790064dcfe/manager/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.639182 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-7mtt9_a7fd9019-83a0-41a0-8380-fac36130cb3d/manager/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.716846 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-cx7lp_4347835b-b3fa-40b5-b227-43c9da18c8d1/kube-rbac-proxy/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.808046 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-r2mxk_623e3bae-ed71-479d-8ea3-ca0ca035a8a3/kube-rbac-proxy/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.833094 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-cx7lp_4347835b-b3fa-40b5-b227-43c9da18c8d1/manager/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.881455 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-r2mxk_623e3bae-ed71-479d-8ea3-ca0ca035a8a3/manager/0.log" Sep 30 01:20:17 crc kubenswrapper[4922]: I0930 01:20:17.993303 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-dwpq7_ce8f2c94-c958-4874-a2b8-9b3ee2ca943f/kube-rbac-proxy/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.050981 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-dwpq7_ce8f2c94-c958-4874-a2b8-9b3ee2ca943f/manager/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.097872 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-vxc2x_851f4d4d-a27d-4fb8-9d26-9ea61e2eb423/kube-rbac-proxy/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.213115 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-vxc2x_851f4d4d-a27d-4fb8-9d26-9ea61e2eb423/manager/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 
01:20:18.261667 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-czjxj_87c9a6b1-e1ea-41dc-b77f-67b22bc39517/kube-rbac-proxy/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.437213 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-czjxj_87c9a6b1-e1ea-41dc-b77f-67b22bc39517/manager/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.453572 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-zhhc4_e2481ff3-5842-4351-b0fc-71fecd911258/kube-rbac-proxy/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.540454 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-zhhc4_e2481ff3-5842-4351-b0fc-71fecd911258/manager/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.595704 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-6txmj_2dc7bdb9-eab6-4497-8888-adadebf30b1a/kube-rbac-proxy/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.619961 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-6txmj_2dc7bdb9-eab6-4497-8888-adadebf30b1a/manager/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.697344 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5b58fb7c85-tzf2t_353ae411-6209-4172-94f7-0bad05ab725b/kube-rbac-proxy/0.log" Sep 30 01:20:18 crc kubenswrapper[4922]: I0930 01:20:18.832340 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5856b6f896-mmf2h_9ab5099d-a53a-4b25-80a1-a976963dab46/kube-rbac-proxy/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.045006 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5856b6f896-mmf2h_9ab5099d-a53a-4b25-80a1-a976963dab46/operator/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.061719 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-ts4cc_915c46b2-3abe-4a77-8d50-03fecbbf6575/registry-server/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.168363 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-qxlqv_9eb5c0db-802b-4f80-ac48-9f1e75e3cebb/kube-rbac-proxy/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.372404 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-mcrvk_55d075ba-936a-4e25-ac68-01ae1a6a0a33/kube-rbac-proxy/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.407043 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-qxlqv_9eb5c0db-802b-4f80-ac48-9f1e75e3cebb/manager/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.447096 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-mcrvk_55d075ba-936a-4e25-ac68-01ae1a6a0a33/manager/0.log" Sep 30 01:20:19 
crc kubenswrapper[4922]: I0930 01:20:19.644983 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-8blrf_1e3f1a04-5e07-4c81-93dc-beca7a598caf/operator/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.699903 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-dq4dx_5d6e0d8b-b5bf-49eb-8619-8f60d4177c32/kube-rbac-proxy/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.837612 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-dq4dx_5d6e0d8b-b5bf-49eb-8619-8f60d4177c32/manager/0.log" Sep 30 01:20:19 crc kubenswrapper[4922]: I0930 01:20:19.895694 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-wg8cr_f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5/kube-rbac-proxy/0.log" Sep 30 01:20:20 crc kubenswrapper[4922]: I0930 01:20:20.110642 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-8kgtj_846b5189-20cf-414a-b682-a2bbc6e184cf/kube-rbac-proxy/0.log" Sep 30 01:20:20 crc kubenswrapper[4922]: I0930 01:20:20.110667 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-8kgtj_846b5189-20cf-414a-b682-a2bbc6e184cf/manager/0.log" Sep 30 01:20:20 crc kubenswrapper[4922]: I0930 01:20:20.192063 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-wg8cr_f689c3a4-120f-4a3e-84f0-0fe9ef19c9d5/manager/0.log" Sep 30 01:20:20 crc kubenswrapper[4922]: I0930 01:20:20.316364 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-lr8wj_7171ed90-0002-4a34-a417-39a2645e8566/kube-rbac-proxy/0.log" Sep 30 01:20:20 crc kubenswrapper[4922]: I0930 01:20:20.356050 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-lr8wj_7171ed90-0002-4a34-a417-39a2645e8566/manager/0.log" Sep 30 01:20:21 crc kubenswrapper[4922]: I0930 01:20:21.004964 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5b58fb7c85-tzf2t_353ae411-6209-4172-94f7-0bad05ab725b/manager/0.log" Sep 30 01:20:23 crc kubenswrapper[4922]: I0930 01:20:23.422703 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:20:23 crc kubenswrapper[4922]: E0930 01:20:23.423349 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:20:34 crc kubenswrapper[4922]: I0930 01:20:34.422144 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:20:34 crc kubenswrapper[4922]: E0930 01:20:34.423148 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:20:39 crc kubenswrapper[4922]: I0930 01:20:39.820376 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-ts4x9_3c05c4fe-136b-402a-a35a-f91147e07150/control-plane-machine-set-operator/0.log" Sep 30 01:20:39 crc kubenswrapper[4922]: I0930 01:20:39.916353 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vqqxw_a5b06da6-a6a0-4367-b89d-619e1dd50c4d/kube-rbac-proxy/0.log" Sep 30 01:20:39 crc kubenswrapper[4922]: I0930 01:20:39.981342 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vqqxw_a5b06da6-a6a0-4367-b89d-619e1dd50c4d/machine-api-operator/0.log" Sep 30 01:20:48 crc kubenswrapper[4922]: I0930 01:20:48.422196 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:20:48 crc kubenswrapper[4922]: E0930 01:20:48.423134 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:20:54 crc kubenswrapper[4922]: I0930 01:20:54.073773 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-bns8p_344eedc4-9955-40cd-9366-e7249d7a6aa7/cert-manager-controller/0.log" Sep 30 01:20:54 crc kubenswrapper[4922]: I0930 01:20:54.234689 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-mh57p_89e3dc38-6b7e-4449-98cf-e7355b77b7aa/cert-manager-cainjector/0.log" Sep 30 01:20:54 crc kubenswrapper[4922]: I0930 01:20:54.257963 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-2p5h8_18349d86-b50f-4900-a5a0-42b7b55f79d6/cert-manager-webhook/0.log" Sep 30 01:20:59 crc kubenswrapper[4922]: I0930 01:20:59.422236 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:20:59 crc kubenswrapper[4922]: E0930 01:20:59.423263 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:21:08 crc kubenswrapper[4922]: I0930 01:21:08.835043 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-kfbdv_819d93a0-d662-4188-811f-10078673fa3f/nmstate-console-plugin/0.log" Sep 30 01:21:08 crc kubenswrapper[4922]: I0930 01:21:08.998885 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-handler-scshm_0e49bb85-66d4-471e-96b0-ae49830ad4e2/nmstate-handler/0.log" Sep 30 01:21:09 crc kubenswrapper[4922]: I0930 01:21:09.038937 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-vsmb9_68088178-12b9-4f0e-afa1-684bd52caf29/kube-rbac-proxy/0.log" Sep 30 01:21:09 crc kubenswrapper[4922]: I0930 01:21:09.137312 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-vsmb9_68088178-12b9-4f0e-afa1-684bd52caf29/nmstate-metrics/0.log" Sep 30 01:21:09 crc kubenswrapper[4922]: I0930 01:21:09.259638 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-m7r42_d17ba7e9-17b9-45e4-86fe-d7efcf6732be/nmstate-operator/0.log" Sep 30 01:21:09 crc kubenswrapper[4922]: I0930 01:21:09.311578 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-xk99n_3cc45e35-ac73-47d9-809e-408dbd5f0077/nmstate-webhook/0.log" Sep 30 01:21:12 crc kubenswrapper[4922]: I0930 01:21:12.422713 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:21:12 crc kubenswrapper[4922]: E0930 01:21:12.423957 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.449018 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nbhhj"] Sep 30 01:21:25 crc kubenswrapper[4922]: E0930 01:21:25.449857 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="registry-server" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.449869 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="registry-server" Sep 30 01:21:25 crc kubenswrapper[4922]: E0930 01:21:25.449913 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="extract-utilities" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.449921 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="extract-utilities" Sep 30 01:21:25 crc kubenswrapper[4922]: E0930 01:21:25.449941 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="extract-content" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.449948 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="extract-content" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.450143 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="707cbf49-1dba-43b4-a13d-efd0f4df6ec7" containerName="registry-server" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.453235 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.470724 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-catalog-content\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.471066 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdq7n\" (UniqueName: \"kubernetes.io/projected/948b9a96-b073-4992-89f3-5277dc4e40af-kube-api-access-sdq7n\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.471202 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-utilities\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.498643 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nbhhj"] Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.573263 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-utilities\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.573441 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-catalog-content\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.573493 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdq7n\" (UniqueName: \"kubernetes.io/projected/948b9a96-b073-4992-89f3-5277dc4e40af-kube-api-access-sdq7n\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.573755 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-utilities\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.573879 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-catalog-content\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.605458 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sdq7n\" (UniqueName: \"kubernetes.io/projected/948b9a96-b073-4992-89f3-5277dc4e40af-kube-api-access-sdq7n\") pod \"certified-operators-nbhhj\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.804877 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-vvmvc_5d45b0ab-59c1-49eb-8b38-8343f7a246a1/kube-rbac-proxy/0.log" Sep 30 01:21:25 crc kubenswrapper[4922]: I0930 01:21:25.831594 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.323764 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-frr-files/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.412337 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nbhhj"] Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.461945 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-vvmvc_5d45b0ab-59c1-49eb-8b38-8343f7a246a1/controller/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.560531 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-frr-files/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.580284 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-reloader/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.589562 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-metrics/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.633159 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-reloader/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.779364 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-frr-files/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.794570 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-reloader/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.815729 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-metrics/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.852247 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-metrics/0.log" Sep 30 01:21:26 crc kubenswrapper[4922]: I0930 01:21:26.995552 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-reloader/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.001314 4922 generic.go:334] "Generic (PLEG): container finished" podID="948b9a96-b073-4992-89f3-5277dc4e40af" containerID="14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0" exitCode=0 Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 
01:21:27.001355 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhhj" event={"ID":"948b9a96-b073-4992-89f3-5277dc4e40af","Type":"ContainerDied","Data":"14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0"} Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.001378 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhhj" event={"ID":"948b9a96-b073-4992-89f3-5277dc4e40af","Type":"ContainerStarted","Data":"553be5358297e4e389a566bbe99043a6232f9434738dd18395780a8ae2284f03"} Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.003264 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.036600 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/controller/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.044736 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-frr-files/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.068736 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/cp-metrics/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.256286 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/frr-metrics/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.256652 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/kube-rbac-proxy-frr/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.268978 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/kube-rbac-proxy/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.423776 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:21:27 crc kubenswrapper[4922]: E0930 01:21:27.424031 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.443487 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/reloader/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.453106 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-nnsjc_6975026a-8111-4858-80b0-bf06609ac878/frr-k8s-webhook-server/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.642085 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-c87f67444-888l5_0b4b3f98-7191-422c-a8de-afbad5b8cd5d/manager/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.819854 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_metallb-operator-webhook-server-75dd84bf76-bcl8s_18415984-47a8-4a45-ad81-aa058b10d744/webhook-server/0.log" Sep 30 01:21:27 crc kubenswrapper[4922]: I0930 01:21:27.922150 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-qj9cs_3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58/kube-rbac-proxy/0.log" Sep 30 01:21:28 crc kubenswrapper[4922]: I0930 01:21:28.023543 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhhj" event={"ID":"948b9a96-b073-4992-89f3-5277dc4e40af","Type":"ContainerStarted","Data":"500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb"} Sep 30 01:21:28 crc kubenswrapper[4922]: I0930 01:21:28.937850 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-qj9cs_3c4f6284-1ca8-4204-87b1-aaa1c6a2fb58/speaker/0.log" Sep 30 01:21:30 crc kubenswrapper[4922]: I0930 01:21:30.047899 4922 generic.go:334] "Generic (PLEG): container finished" podID="948b9a96-b073-4992-89f3-5277dc4e40af" containerID="500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb" exitCode=0 Sep 30 01:21:30 crc kubenswrapper[4922]: I0930 01:21:30.048199 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhhj" event={"ID":"948b9a96-b073-4992-89f3-5277dc4e40af","Type":"ContainerDied","Data":"500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb"} Sep 30 01:21:30 crc kubenswrapper[4922]: I0930 01:21:30.154627 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7zvc7_7fa14b20-9e00-4c61-9a3e-b064d2244eb8/frr/0.log" Sep 30 01:21:31 crc kubenswrapper[4922]: I0930 01:21:31.060806 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhhj" event={"ID":"948b9a96-b073-4992-89f3-5277dc4e40af","Type":"ContainerStarted","Data":"3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd"} Sep 30 01:21:31 crc kubenswrapper[4922]: I0930 01:21:31.078768 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nbhhj" podStartSLOduration=2.632434639 podStartE2EDuration="6.078752172s" podCreationTimestamp="2025-09-30 01:21:25 +0000 UTC" firstStartedPulling="2025-09-30 01:21:27.002968719 +0000 UTC m=+10491.313257542" lastFinishedPulling="2025-09-30 01:21:30.449286272 +0000 UTC m=+10494.759575075" observedRunningTime="2025-09-30 01:21:31.074809865 +0000 UTC m=+10495.385098698" watchObservedRunningTime="2025-09-30 01:21:31.078752172 +0000 UTC m=+10495.389040995" Sep 30 01:21:35 crc kubenswrapper[4922]: I0930 01:21:35.833334 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:35 crc kubenswrapper[4922]: I0930 01:21:35.834249 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:36 crc kubenswrapper[4922]: I0930 01:21:36.032982 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:36 crc kubenswrapper[4922]: I0930 01:21:36.164903 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:36 crc kubenswrapper[4922]: I0930 01:21:36.272739 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-nbhhj"] Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.162421 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nbhhj" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="registry-server" containerID="cri-o://3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd" gracePeriod=2 Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.770339 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.866932 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-utilities\") pod \"948b9a96-b073-4992-89f3-5277dc4e40af\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.867536 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-catalog-content\") pod \"948b9a96-b073-4992-89f3-5277dc4e40af\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.867836 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdq7n\" (UniqueName: \"kubernetes.io/projected/948b9a96-b073-4992-89f3-5277dc4e40af-kube-api-access-sdq7n\") pod \"948b9a96-b073-4992-89f3-5277dc4e40af\" (UID: \"948b9a96-b073-4992-89f3-5277dc4e40af\") " Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.867998 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-utilities" (OuterVolumeSpecName: "utilities") pod "948b9a96-b073-4992-89f3-5277dc4e40af" (UID: "948b9a96-b073-4992-89f3-5277dc4e40af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.868629 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.872693 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948b9a96-b073-4992-89f3-5277dc4e40af-kube-api-access-sdq7n" (OuterVolumeSpecName: "kube-api-access-sdq7n") pod "948b9a96-b073-4992-89f3-5277dc4e40af" (UID: "948b9a96-b073-4992-89f3-5277dc4e40af"). InnerVolumeSpecName "kube-api-access-sdq7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.920550 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "948b9a96-b073-4992-89f3-5277dc4e40af" (UID: "948b9a96-b073-4992-89f3-5277dc4e40af"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.971807 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948b9a96-b073-4992-89f3-5277dc4e40af-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:38 crc kubenswrapper[4922]: I0930 01:21:38.972240 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdq7n\" (UniqueName: \"kubernetes.io/projected/948b9a96-b073-4992-89f3-5277dc4e40af-kube-api-access-sdq7n\") on node \"crc\" DevicePath \"\"" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.189784 4922 generic.go:334] "Generic (PLEG): container finished" podID="948b9a96-b073-4992-89f3-5277dc4e40af" containerID="3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd" exitCode=0 Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.189846 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhhj" event={"ID":"948b9a96-b073-4992-89f3-5277dc4e40af","Type":"ContainerDied","Data":"3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd"} Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.189890 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nbhhj" event={"ID":"948b9a96-b073-4992-89f3-5277dc4e40af","Type":"ContainerDied","Data":"553be5358297e4e389a566bbe99043a6232f9434738dd18395780a8ae2284f03"} Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.189904 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nbhhj" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.189921 4922 scope.go:117] "RemoveContainer" containerID="3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.235501 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nbhhj"] Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.246576 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nbhhj"] Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.247552 4922 scope.go:117] "RemoveContainer" containerID="500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.283093 4922 scope.go:117] "RemoveContainer" containerID="14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.323249 4922 scope.go:117] "RemoveContainer" containerID="3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd" Sep 30 01:21:39 crc kubenswrapper[4922]: E0930 01:21:39.323703 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd\": container with ID starting with 3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd not found: ID does not exist" containerID="3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.323753 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd"} err="failed to get container status 
\"3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd\": rpc error: code = NotFound desc = could not find container \"3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd\": container with ID starting with 3b798fe79a334dcc58785c76d5b22cea9b4a5424c781a51c6b7acb8a2df3b2bd not found: ID does not exist" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.323784 4922 scope.go:117] "RemoveContainer" containerID="500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb" Sep 30 01:21:39 crc kubenswrapper[4922]: E0930 01:21:39.324631 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb\": container with ID starting with 500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb not found: ID does not exist" containerID="500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.324663 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb"} err="failed to get container status \"500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb\": rpc error: code = NotFound desc = could not find container \"500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb\": container with ID starting with 500dd0c71b1fde8902de4937c293d885e0e85fe8ac8dbd8bb9b953a3f4cf48fb not found: ID does not exist" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.324684 4922 scope.go:117] "RemoveContainer" containerID="14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0" Sep 30 01:21:39 crc kubenswrapper[4922]: E0930 01:21:39.326383 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0\": container with ID starting with 14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0 not found: ID does not exist" containerID="14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.326440 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0"} err="failed to get container status \"14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0\": rpc error: code = NotFound desc = could not find container \"14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0\": container with ID starting with 14e0382030f87492b8ce11defcd286ca2775e6e56baf9d00c6175dadcb0107f0 not found: ID does not exist" Sep 30 01:21:39 crc kubenswrapper[4922]: I0930 01:21:39.422206 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:21:39 crc kubenswrapper[4922]: E0930 01:21:39.422619 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:21:40 crc kubenswrapper[4922]: I0930 01:21:40.443516 4922 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" path="/var/lib/kubelet/pods/948b9a96-b073-4992-89f3-5277dc4e40af/volumes" Sep 30 01:21:43 crc kubenswrapper[4922]: I0930 01:21:43.915067 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q_ca2c2f99-b018-4313-90e5-73ae578f6717/util/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.313200 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q_ca2c2f99-b018-4313-90e5-73ae578f6717/util/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.323819 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q_ca2c2f99-b018-4313-90e5-73ae578f6717/pull/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.376076 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q_ca2c2f99-b018-4313-90e5-73ae578f6717/pull/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.464235 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q_ca2c2f99-b018-4313-90e5-73ae578f6717/util/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.490961 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q_ca2c2f99-b018-4313-90e5-73ae578f6717/extract/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.535105 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69c5j8q_ca2c2f99-b018-4313-90e5-73ae578f6717/pull/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.647509 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt_d286b91a-b67a-4540-b8e7-5296d645fead/util/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.824556 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt_d286b91a-b67a-4540-b8e7-5296d645fead/pull/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.850053 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt_d286b91a-b67a-4540-b8e7-5296d645fead/pull/0.log" Sep 30 01:21:44 crc kubenswrapper[4922]: I0930 01:21:44.862856 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt_d286b91a-b67a-4540-b8e7-5296d645fead/util/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.072043 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt_d286b91a-b67a-4540-b8e7-5296d645fead/pull/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.077747 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt_d286b91a-b67a-4540-b8e7-5296d645fead/extract/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.090643 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bchnkkt_d286b91a-b67a-4540-b8e7-5296d645fead/util/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.278930 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc_537ab108-9132-4e26-b55a-821c042d8c0d/util/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.448616 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc_537ab108-9132-4e26-b55a-821c042d8c0d/util/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.529710 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc_537ab108-9132-4e26-b55a-821c042d8c0d/pull/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.557263 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc_537ab108-9132-4e26-b55a-821c042d8c0d/pull/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.703041 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc_537ab108-9132-4e26-b55a-821c042d8c0d/util/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.712078 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc_537ab108-9132-4e26-b55a-821c042d8c0d/pull/0.log" Sep 30 01:21:45 crc kubenswrapper[4922]: I0930 01:21:45.728814 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dspdqc_537ab108-9132-4e26-b55a-821c042d8c0d/extract/0.log" Sep 30 01:21:46 crc kubenswrapper[4922]: I0930 01:21:46.538724 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xk4lz_5dfb81d8-4b0b-4414-8a11-f0d8b72e471f/extract-utilities/0.log" Sep 30 01:21:46 crc kubenswrapper[4922]: I0930 01:21:46.731847 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xk4lz_5dfb81d8-4b0b-4414-8a11-f0d8b72e471f/extract-utilities/0.log" Sep 30 01:21:46 crc kubenswrapper[4922]: I0930 01:21:46.758709 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xk4lz_5dfb81d8-4b0b-4414-8a11-f0d8b72e471f/extract-content/0.log" Sep 30 01:21:46 crc kubenswrapper[4922]: I0930 01:21:46.808986 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xk4lz_5dfb81d8-4b0b-4414-8a11-f0d8b72e471f/extract-content/0.log" Sep 30 01:21:46 crc kubenswrapper[4922]: I0930 01:21:46.940797 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xk4lz_5dfb81d8-4b0b-4414-8a11-f0d8b72e471f/extract-utilities/0.log" Sep 30 01:21:47 crc kubenswrapper[4922]: I0930 01:21:47.043842 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-xk4lz_5dfb81d8-4b0b-4414-8a11-f0d8b72e471f/extract-content/0.log" Sep 30 01:21:47 crc kubenswrapper[4922]: I0930 01:21:47.163478 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hhckg_331f6e00-a958-4a5c-96de-411890893fad/extract-utilities/0.log" Sep 30 01:21:47 crc kubenswrapper[4922]: I0930 01:21:47.447406 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hhckg_331f6e00-a958-4a5c-96de-411890893fad/extract-utilities/0.log" Sep 30 01:21:47 crc kubenswrapper[4922]: I0930 01:21:47.466752 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hhckg_331f6e00-a958-4a5c-96de-411890893fad/extract-content/0.log" Sep 30 01:21:47 crc kubenswrapper[4922]: I0930 01:21:47.474021 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hhckg_331f6e00-a958-4a5c-96de-411890893fad/extract-content/0.log" Sep 30 01:21:47 crc kubenswrapper[4922]: I0930 01:21:47.662250 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hhckg_331f6e00-a958-4a5c-96de-411890893fad/extract-utilities/0.log" Sep 30 01:21:47 crc kubenswrapper[4922]: I0930 01:21:47.737422 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hhckg_331f6e00-a958-4a5c-96de-411890893fad/extract-content/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.346736 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7_7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92/util/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.546372 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xk4lz_5dfb81d8-4b0b-4414-8a11-f0d8b72e471f/registry-server/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.563335 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7_7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92/util/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.652307 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7_7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92/pull/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.671002 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7_7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92/pull/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.833521 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7_7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92/util/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.904157 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7_7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92/pull/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.912727 4922 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96h9tf7_7d6fb6d7-4fde-45d6-84c3-5e2f3b4bfb92/extract/0.log" Sep 30 01:21:48 crc kubenswrapper[4922]: I0930 01:21:48.977721 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hhckg_331f6e00-a958-4a5c-96de-411890893fad/registry-server/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.040120 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-wfjrk_95f36381-a854-43df-bbe6-7afddea2b2c7/marketplace-operator/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.111855 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h9fcf_868a3ad6-a6e9-4e1c-9aad-638cc0337f1a/extract-utilities/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.283596 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h9fcf_868a3ad6-a6e9-4e1c-9aad-638cc0337f1a/extract-utilities/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.283761 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h9fcf_868a3ad6-a6e9-4e1c-9aad-638cc0337f1a/extract-content/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.284211 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h9fcf_868a3ad6-a6e9-4e1c-9aad-638cc0337f1a/extract-content/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.462886 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr6r7_43844139-65aa-433b-9670-008ab6c350e3/extract-utilities/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.470609 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h9fcf_868a3ad6-a6e9-4e1c-9aad-638cc0337f1a/extract-utilities/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.582432 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h9fcf_868a3ad6-a6e9-4e1c-9aad-638cc0337f1a/extract-content/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.737376 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-h9fcf_868a3ad6-a6e9-4e1c-9aad-638cc0337f1a/registry-server/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.766331 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr6r7_43844139-65aa-433b-9670-008ab6c350e3/extract-content/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.812353 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr6r7_43844139-65aa-433b-9670-008ab6c350e3/extract-content/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.817345 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr6r7_43844139-65aa-433b-9670-008ab6c350e3/extract-utilities/0.log" Sep 30 01:21:49 crc kubenswrapper[4922]: I0930 01:21:49.990911 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr6r7_43844139-65aa-433b-9670-008ab6c350e3/extract-content/0.log" Sep 30 01:21:50 crc kubenswrapper[4922]: I0930 01:21:50.024239 4922 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr6r7_43844139-65aa-433b-9670-008ab6c350e3/extract-utilities/0.log" Sep 30 01:21:50 crc kubenswrapper[4922]: I0930 01:21:50.837518 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tr6r7_43844139-65aa-433b-9670-008ab6c350e3/registry-server/0.log" Sep 30 01:21:52 crc kubenswrapper[4922]: I0930 01:21:52.421814 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:21:52 crc kubenswrapper[4922]: E0930 01:21:52.422291 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:22:04 crc kubenswrapper[4922]: I0930 01:22:04.427686 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:22:04 crc kubenswrapper[4922]: E0930 01:22:04.428536 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:22:06 crc kubenswrapper[4922]: I0930 01:22:06.394905 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-sdjrr_5ed9b0d2-2c6a-4526-8a40-c4361f9020f9/prometheus-operator/0.log" Sep 30 01:22:06 crc kubenswrapper[4922]: I0930 01:22:06.516650 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-65c9dcf957-4n96v_1cb5b40d-54dd-4610-a982-3490f932ac7e/prometheus-operator-admission-webhook/0.log" Sep 30 01:22:06 crc kubenswrapper[4922]: I0930 01:22:06.569469 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-65c9dcf957-l45ht_42795bdf-428c-4e42-a1bd-c410f3984a18/prometheus-operator-admission-webhook/0.log" Sep 30 01:22:06 crc kubenswrapper[4922]: I0930 01:22:06.687302 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-tdgkh_3dfa4ab9-ec96-4c28-bf02-9395095442e2/operator/0.log" Sep 30 01:22:06 crc kubenswrapper[4922]: I0930 01:22:06.744695 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-wlsdp_3e46edec-6c01-4236-a0ca-3125fb7126db/perses-operator/0.log" Sep 30 01:22:18 crc kubenswrapper[4922]: I0930 01:22:18.423121 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:22:18 crc kubenswrapper[4922]: E0930 01:22:18.424054 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:22:25 crc kubenswrapper[4922]: E0930 01:22:25.085741 4922 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.227:41074->38.102.83.227:41689: write tcp 38.102.83.227:41074->38.102.83.227:41689: write: broken pipe Sep 30 01:22:32 crc kubenswrapper[4922]: I0930 01:22:32.421666 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:22:32 crc kubenswrapper[4922]: E0930 01:22:32.422297 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:22:44 crc kubenswrapper[4922]: I0930 01:22:44.422435 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:22:44 crc kubenswrapper[4922]: E0930 01:22:44.423547 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:22:55 crc kubenswrapper[4922]: I0930 01:22:55.424758 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:22:55 crc kubenswrapper[4922]: E0930 01:22:55.427646 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:23:00 crc kubenswrapper[4922]: I0930 01:23:00.824819 4922 scope.go:117] "RemoveContainer" containerID="e16ac49050ae218724ec3e3345999e367c0015f595a6bf7643a0ed6c4b3432ec" Sep 30 01:23:06 crc kubenswrapper[4922]: I0930 01:23:06.444155 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:23:06 crc kubenswrapper[4922]: E0930 01:23:06.445493 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:23:20 crc kubenswrapper[4922]: I0930 01:23:20.422424 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:23:20 crc 
kubenswrapper[4922]: E0930 01:23:20.423324 4922 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-pbnnm_openshift-machine-config-operator(347374f7-ade0-4434-b26d-db474c4413f9)\"" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" Sep 30 01:23:35 crc kubenswrapper[4922]: I0930 01:23:35.424907 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:23:36 crc kubenswrapper[4922]: I0930 01:23:36.654200 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"77efc3523d02ef788b9a0d21b19c83c061533da9b021ef612b5675d911d75feb"} Sep 30 01:24:51 crc kubenswrapper[4922]: I0930 01:24:51.750978 4922 generic.go:334] "Generic (PLEG): container finished" podID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerID="a56f00a5159702a4d224544fdf1f13b607b754cd2948510da59a69acb99d7ee9" exitCode=0 Sep 30 01:24:51 crc kubenswrapper[4922]: I0930 01:24:51.751061 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" event={"ID":"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a","Type":"ContainerDied","Data":"a56f00a5159702a4d224544fdf1f13b607b754cd2948510da59a69acb99d7ee9"} Sep 30 01:24:51 crc kubenswrapper[4922]: I0930 01:24:51.752341 4922 scope.go:117] "RemoveContainer" containerID="a56f00a5159702a4d224544fdf1f13b607b754cd2948510da59a69acb99d7ee9" Sep 30 01:24:52 crc kubenswrapper[4922]: I0930 01:24:52.114197 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mh2ts_must-gather-9fwl7_6005d500-5fcd-4ed9-a8b2-3807f2d4e25a/gather/0.log" Sep 30 01:25:00 crc kubenswrapper[4922]: I0930 01:25:00.954017 4922 scope.go:117] "RemoveContainer" containerID="d6f27b14601203ff171c664749c0003c154776364bac02d5b92a64dce1e86496" Sep 30 01:25:02 crc kubenswrapper[4922]: I0930 01:25:02.177534 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mh2ts/must-gather-9fwl7"] Sep 30 01:25:02 crc kubenswrapper[4922]: I0930 01:25:02.178269 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerName="copy" containerID="cri-o://e5607868a2eca84ce8942ade6eb02cc2f181556921559b78e4fa73b8b4de97d4" gracePeriod=2 Sep 30 01:25:02 crc kubenswrapper[4922]: I0930 01:25:02.191053 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mh2ts/must-gather-9fwl7"] Sep 30 01:25:02 crc kubenswrapper[4922]: I0930 01:25:02.967547 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mh2ts_must-gather-9fwl7_6005d500-5fcd-4ed9-a8b2-3807f2d4e25a/copy/0.log" Sep 30 01:25:02 crc kubenswrapper[4922]: I0930 01:25:02.973426 4922 generic.go:334] "Generic (PLEG): container finished" podID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerID="e5607868a2eca84ce8942ade6eb02cc2f181556921559b78e4fa73b8b4de97d4" exitCode=143 Sep 30 01:25:02 crc kubenswrapper[4922]: I0930 01:25:02.973569 4922 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0e66e259a0d494592ddee7b085c745177a1e5279fcfa18532d1610911a0454e" Sep 30 
01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.031624 4922 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mh2ts_must-gather-9fwl7_6005d500-5fcd-4ed9-a8b2-3807f2d4e25a/copy/0.log" Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.041728 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.076782 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-must-gather-output\") pod \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.076879 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj6l8\" (UniqueName: \"kubernetes.io/projected/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-kube-api-access-bj6l8\") pod \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\" (UID: \"6005d500-5fcd-4ed9-a8b2-3807f2d4e25a\") " Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.094527 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-kube-api-access-bj6l8" (OuterVolumeSpecName: "kube-api-access-bj6l8") pod "6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" (UID: "6005d500-5fcd-4ed9-a8b2-3807f2d4e25a"). InnerVolumeSpecName "kube-api-access-bj6l8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.181359 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj6l8\" (UniqueName: \"kubernetes.io/projected/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-kube-api-access-bj6l8\") on node \"crc\" DevicePath \"\"" Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.361957 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" (UID: "6005d500-5fcd-4ed9-a8b2-3807f2d4e25a"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.385251 4922 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 01:25:03 crc kubenswrapper[4922]: I0930 01:25:03.983545 4922 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mh2ts/must-gather-9fwl7" Sep 30 01:25:04 crc kubenswrapper[4922]: I0930 01:25:04.433467 4922 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" path="/var/lib/kubelet/pods/6005d500-5fcd-4ed9-a8b2-3807f2d4e25a/volumes" Sep 30 01:25:58 crc kubenswrapper[4922]: I0930 01:25:58.912429 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:25:58 crc kubenswrapper[4922]: I0930 01:25:58.913157 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:26:01 crc kubenswrapper[4922]: I0930 01:26:01.034079 4922 scope.go:117] "RemoveContainer" containerID="a56f00a5159702a4d224544fdf1f13b607b754cd2948510da59a69acb99d7ee9" Sep 30 01:26:01 crc kubenswrapper[4922]: I0930 01:26:01.491168 4922 scope.go:117] "RemoveContainer" containerID="e5607868a2eca84ce8942ade6eb02cc2f181556921559b78e4fa73b8b4de97d4" Sep 30 01:26:14 crc kubenswrapper[4922]: I0930 01:26:14.809056 4922 trace.go:236] Trace[752556688]: "Calculate volume metrics of mysql-db for pod openstack/openstack-cell1-galera-0" (30-Sep-2025 01:26:13.623) (total time: 1183ms): Sep 30 01:26:14 crc kubenswrapper[4922]: Trace[752556688]: [1.1837778s] [1.1837778s] END Sep 30 01:26:28 crc kubenswrapper[4922]: I0930 01:26:28.913085 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:26:28 crc kubenswrapper[4922]: I0930 01:26:28.914738 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.033859 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-brjh2"] Sep 30 01:26:46 crc kubenswrapper[4922]: E0930 01:26:46.035358 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerName="gather" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.035373 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerName="gather" Sep 30 01:26:46 crc kubenswrapper[4922]: E0930 01:26:46.035381 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="extract-utilities" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.035417 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="extract-utilities" Sep 30 01:26:46 crc kubenswrapper[4922]: E0930 01:26:46.035452 4922 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="extract-content" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.035461 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="extract-content" Sep 30 01:26:46 crc kubenswrapper[4922]: E0930 01:26:46.035498 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerName="copy" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.035506 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerName="copy" Sep 30 01:26:46 crc kubenswrapper[4922]: E0930 01:26:46.035536 4922 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="registry-server" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.035542 4922 state_mem.go:107] "Deleted CPUSet assignment" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="registry-server" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.036009 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="948b9a96-b073-4992-89f3-5277dc4e40af" containerName="registry-server" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.036034 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerName="gather" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.036049 4922 memory_manager.go:354] "RemoveStaleState removing state" podUID="6005d500-5fcd-4ed9-a8b2-3807f2d4e25a" containerName="copy" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.040101 4922 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.091861 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-brjh2"] Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.188038 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-utilities\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.188583 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-catalog-content\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.188735 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld9sd\" (UniqueName: \"kubernetes.io/projected/fd723389-ea27-472f-9d51-9fa6565358d4-kube-api-access-ld9sd\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.293013 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-utilities\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.293511 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-catalog-content\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.293698 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld9sd\" (UniqueName: \"kubernetes.io/projected/fd723389-ea27-472f-9d51-9fa6565358d4-kube-api-access-ld9sd\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.294107 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-catalog-content\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.294435 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-utilities\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.324572 4922 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ld9sd\" (UniqueName: \"kubernetes.io/projected/fd723389-ea27-472f-9d51-9fa6565358d4-kube-api-access-ld9sd\") pod \"community-operators-brjh2\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:46 crc kubenswrapper[4922]: I0930 01:26:46.390516 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:47 crc kubenswrapper[4922]: I0930 01:26:46.944964 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-brjh2"] Sep 30 01:26:47 crc kubenswrapper[4922]: I0930 01:26:47.502415 4922 generic.go:334] "Generic (PLEG): container finished" podID="fd723389-ea27-472f-9d51-9fa6565358d4" containerID="9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f" exitCode=0 Sep 30 01:26:47 crc kubenswrapper[4922]: I0930 01:26:47.502473 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brjh2" event={"ID":"fd723389-ea27-472f-9d51-9fa6565358d4","Type":"ContainerDied","Data":"9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f"} Sep 30 01:26:47 crc kubenswrapper[4922]: I0930 01:26:47.502940 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brjh2" event={"ID":"fd723389-ea27-472f-9d51-9fa6565358d4","Type":"ContainerStarted","Data":"175ba6f6a2d6e862c20c310e6fdcfbbdf2c1d4be2389678cc5e1e5c1b9fd947e"} Sep 30 01:26:47 crc kubenswrapper[4922]: I0930 01:26:47.504607 4922 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 01:26:48 crc kubenswrapper[4922]: I0930 01:26:48.529136 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brjh2" event={"ID":"fd723389-ea27-472f-9d51-9fa6565358d4","Type":"ContainerStarted","Data":"4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc"} Sep 30 01:26:49 crc kubenswrapper[4922]: I0930 01:26:49.545839 4922 generic.go:334] "Generic (PLEG): container finished" podID="fd723389-ea27-472f-9d51-9fa6565358d4" containerID="4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc" exitCode=0 Sep 30 01:26:49 crc kubenswrapper[4922]: I0930 01:26:49.545983 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brjh2" event={"ID":"fd723389-ea27-472f-9d51-9fa6565358d4","Type":"ContainerDied","Data":"4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc"} Sep 30 01:26:50 crc kubenswrapper[4922]: I0930 01:26:50.559959 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brjh2" event={"ID":"fd723389-ea27-472f-9d51-9fa6565358d4","Type":"ContainerStarted","Data":"fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760"} Sep 30 01:26:50 crc kubenswrapper[4922]: I0930 01:26:50.598536 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-brjh2" podStartSLOduration=3.132355277 podStartE2EDuration="5.598508338s" podCreationTimestamp="2025-09-30 01:26:45 +0000 UTC" firstStartedPulling="2025-09-30 01:26:47.504417304 +0000 UTC m=+10811.814706117" lastFinishedPulling="2025-09-30 01:26:49.970570325 +0000 UTC m=+10814.280859178" observedRunningTime="2025-09-30 01:26:50.581416675 +0000 UTC m=+10814.891705488" watchObservedRunningTime="2025-09-30 
01:26:50.598508338 +0000 UTC m=+10814.908797181" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.395339 4922 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-97gxp"] Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.398774 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.408130 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-97gxp"] Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.568736 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-utilities\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.569535 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqrwd\" (UniqueName: \"kubernetes.io/projected/af3655d9-2e97-400b-a17d-3419c26e9196-kube-api-access-dqrwd\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.570689 4922 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-catalog-content\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.672651 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqrwd\" (UniqueName: \"kubernetes.io/projected/af3655d9-2e97-400b-a17d-3419c26e9196-kube-api-access-dqrwd\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.672824 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-catalog-content\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.672896 4922 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-utilities\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.673506 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-utilities\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.673518 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-catalog-content\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.699661 4922 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqrwd\" (UniqueName: \"kubernetes.io/projected/af3655d9-2e97-400b-a17d-3419c26e9196-kube-api-access-dqrwd\") pod \"redhat-marketplace-97gxp\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:52 crc kubenswrapper[4922]: I0930 01:26:52.725438 4922 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:26:53 crc kubenswrapper[4922]: I0930 01:26:53.253511 4922 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-97gxp"] Sep 30 01:26:53 crc kubenswrapper[4922]: W0930 01:26:53.561607 4922 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf3655d9_2e97_400b_a17d_3419c26e9196.slice/crio-5f6fa1df494877c494d172233cf79484f2ebf5b5a4127e6f63f45194cbb52ed0 WatchSource:0}: Error finding container 5f6fa1df494877c494d172233cf79484f2ebf5b5a4127e6f63f45194cbb52ed0: Status 404 returned error can't find the container with id 5f6fa1df494877c494d172233cf79484f2ebf5b5a4127e6f63f45194cbb52ed0 Sep 30 01:26:53 crc kubenswrapper[4922]: I0930 01:26:53.630771 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-97gxp" event={"ID":"af3655d9-2e97-400b-a17d-3419c26e9196","Type":"ContainerStarted","Data":"5f6fa1df494877c494d172233cf79484f2ebf5b5a4127e6f63f45194cbb52ed0"} Sep 30 01:26:54 crc kubenswrapper[4922]: I0930 01:26:54.649324 4922 generic.go:334] "Generic (PLEG): container finished" podID="af3655d9-2e97-400b-a17d-3419c26e9196" containerID="9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206" exitCode=0 Sep 30 01:26:54 crc kubenswrapper[4922]: I0930 01:26:54.649463 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-97gxp" event={"ID":"af3655d9-2e97-400b-a17d-3419c26e9196","Type":"ContainerDied","Data":"9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206"} Sep 30 01:26:56 crc kubenswrapper[4922]: I0930 01:26:56.390804 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:56 crc kubenswrapper[4922]: I0930 01:26:56.451154 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:56 crc kubenswrapper[4922]: I0930 01:26:56.503469 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:56 crc kubenswrapper[4922]: I0930 01:26:56.678000 4922 generic.go:334] "Generic (PLEG): container finished" podID="af3655d9-2e97-400b-a17d-3419c26e9196" containerID="919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185" exitCode=0 Sep 30 01:26:56 crc kubenswrapper[4922]: I0930 01:26:56.678065 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-97gxp" 
event={"ID":"af3655d9-2e97-400b-a17d-3419c26e9196","Type":"ContainerDied","Data":"919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185"} Sep 30 01:26:56 crc kubenswrapper[4922]: I0930 01:26:56.754816 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:26:57 crc kubenswrapper[4922]: I0930 01:26:57.692983 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-97gxp" event={"ID":"af3655d9-2e97-400b-a17d-3419c26e9196","Type":"ContainerStarted","Data":"2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56"} Sep 30 01:26:57 crc kubenswrapper[4922]: I0930 01:26:57.716807 4922 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-97gxp" podStartSLOduration=3.234637821 podStartE2EDuration="5.716781228s" podCreationTimestamp="2025-09-30 01:26:52 +0000 UTC" firstStartedPulling="2025-09-30 01:26:54.652472811 +0000 UTC m=+10818.962761624" lastFinishedPulling="2025-09-30 01:26:57.134616178 +0000 UTC m=+10821.444905031" observedRunningTime="2025-09-30 01:26:57.709901958 +0000 UTC m=+10822.020190781" watchObservedRunningTime="2025-09-30 01:26:57.716781228 +0000 UTC m=+10822.027070061" Sep 30 01:26:58 crc kubenswrapper[4922]: I0930 01:26:58.375114 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-brjh2"] Sep 30 01:26:58 crc kubenswrapper[4922]: I0930 01:26:58.912547 4922 patch_prober.go:28] interesting pod/machine-config-daemon-pbnnm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 01:26:58 crc kubenswrapper[4922]: I0930 01:26:58.912619 4922 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 01:26:58 crc kubenswrapper[4922]: I0930 01:26:58.912663 4922 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" Sep 30 01:26:58 crc kubenswrapper[4922]: I0930 01:26:58.913698 4922 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"77efc3523d02ef788b9a0d21b19c83c061533da9b021ef612b5675d911d75feb"} pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 01:26:58 crc kubenswrapper[4922]: I0930 01:26:58.913755 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" podUID="347374f7-ade0-4434-b26d-db474c4413f9" containerName="machine-config-daemon" containerID="cri-o://77efc3523d02ef788b9a0d21b19c83c061533da9b021ef612b5675d911d75feb" gracePeriod=600 Sep 30 01:26:59 crc kubenswrapper[4922]: I0930 01:26:59.732157 4922 generic.go:334] "Generic (PLEG): container finished" podID="347374f7-ade0-4434-b26d-db474c4413f9" containerID="77efc3523d02ef788b9a0d21b19c83c061533da9b021ef612b5675d911d75feb" exitCode=0 Sep 30 01:26:59 crc kubenswrapper[4922]: 
I0930 01:26:59.732351 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerDied","Data":"77efc3523d02ef788b9a0d21b19c83c061533da9b021ef612b5675d911d75feb"} Sep 30 01:26:59 crc kubenswrapper[4922]: I0930 01:26:59.732919 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-pbnnm" event={"ID":"347374f7-ade0-4434-b26d-db474c4413f9","Type":"ContainerStarted","Data":"a52d0057528798d75f7cc69a792a67c3ce0a932440d5486a75759d080634e810"} Sep 30 01:26:59 crc kubenswrapper[4922]: I0930 01:26:59.733004 4922 scope.go:117] "RemoveContainer" containerID="62f5d7deb68f3fa9451db5916922c8cf81c59629b6206d61c0e232a0ec9f31ff" Sep 30 01:26:59 crc kubenswrapper[4922]: I0930 01:26:59.733150 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-brjh2" podUID="fd723389-ea27-472f-9d51-9fa6565358d4" containerName="registry-server" containerID="cri-o://fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760" gracePeriod=2 Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.234303 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.345698 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-catalog-content\") pod \"fd723389-ea27-472f-9d51-9fa6565358d4\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.345756 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-utilities\") pod \"fd723389-ea27-472f-9d51-9fa6565358d4\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.345982 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ld9sd\" (UniqueName: \"kubernetes.io/projected/fd723389-ea27-472f-9d51-9fa6565358d4-kube-api-access-ld9sd\") pod \"fd723389-ea27-472f-9d51-9fa6565358d4\" (UID: \"fd723389-ea27-472f-9d51-9fa6565358d4\") " Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.346916 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-utilities" (OuterVolumeSpecName: "utilities") pod "fd723389-ea27-472f-9d51-9fa6565358d4" (UID: "fd723389-ea27-472f-9d51-9fa6565358d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.353520 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd723389-ea27-472f-9d51-9fa6565358d4-kube-api-access-ld9sd" (OuterVolumeSpecName: "kube-api-access-ld9sd") pod "fd723389-ea27-472f-9d51-9fa6565358d4" (UID: "fd723389-ea27-472f-9d51-9fa6565358d4"). InnerVolumeSpecName "kube-api-access-ld9sd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.414601 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd723389-ea27-472f-9d51-9fa6565358d4" (UID: "fd723389-ea27-472f-9d51-9fa6565358d4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.448496 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ld9sd\" (UniqueName: \"kubernetes.io/projected/fd723389-ea27-472f-9d51-9fa6565358d4-kube-api-access-ld9sd\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.448535 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.448548 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd723389-ea27-472f-9d51-9fa6565358d4-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.751717 4922 generic.go:334] "Generic (PLEG): container finished" podID="fd723389-ea27-472f-9d51-9fa6565358d4" containerID="fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760" exitCode=0 Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.751795 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-brjh2" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.751819 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brjh2" event={"ID":"fd723389-ea27-472f-9d51-9fa6565358d4","Type":"ContainerDied","Data":"fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760"} Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.752336 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-brjh2" event={"ID":"fd723389-ea27-472f-9d51-9fa6565358d4","Type":"ContainerDied","Data":"175ba6f6a2d6e862c20c310e6fdcfbbdf2c1d4be2389678cc5e1e5c1b9fd947e"} Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.752367 4922 scope.go:117] "RemoveContainer" containerID="fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.794843 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-brjh2"] Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.815282 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-brjh2"] Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.817619 4922 scope.go:117] "RemoveContainer" containerID="4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.858323 4922 scope.go:117] "RemoveContainer" containerID="9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.915422 4922 scope.go:117] "RemoveContainer" containerID="fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760" Sep 30 01:27:00 crc kubenswrapper[4922]: E0930 01:27:00.916004 4922 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760\": container with ID starting with fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760 not found: ID does not exist" containerID="fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.916040 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760"} err="failed to get container status \"fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760\": rpc error: code = NotFound desc = could not find container \"fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760\": container with ID starting with fecd56d58c0efd6aa80d54b50b619bd8ebad38bc59627ec972317e6532f0d760 not found: ID does not exist" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.916066 4922 scope.go:117] "RemoveContainer" containerID="4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc" Sep 30 01:27:00 crc kubenswrapper[4922]: E0930 01:27:00.916541 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc\": container with ID starting with 4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc not found: ID does not exist" containerID="4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.916577 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc"} err="failed to get container status \"4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc\": rpc error: code = NotFound desc = could not find container \"4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc\": container with ID starting with 4d9dae293250d9a006e96cce07ced214b7048310f8dab2bf9db9b390890f69dc not found: ID does not exist" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.916604 4922 scope.go:117] "RemoveContainer" containerID="9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f" Sep 30 01:27:00 crc kubenswrapper[4922]: E0930 01:27:00.917012 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f\": container with ID starting with 9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f not found: ID does not exist" containerID="9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f" Sep 30 01:27:00 crc kubenswrapper[4922]: I0930 01:27:00.917030 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f"} err="failed to get container status \"9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f\": rpc error: code = NotFound desc = could not find container \"9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f\": container with ID starting with 9cdba98f9a62cc8a2a10721c1d6782f5a6c3092158546889af0c5afae6c22c1f not found: ID does not exist" Sep 30 01:27:02 crc kubenswrapper[4922]: I0930 01:27:02.442529 4922 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="fd723389-ea27-472f-9d51-9fa6565358d4" path="/var/lib/kubelet/pods/fd723389-ea27-472f-9d51-9fa6565358d4/volumes" Sep 30 01:27:02 crc kubenswrapper[4922]: I0930 01:27:02.726085 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:27:02 crc kubenswrapper[4922]: I0930 01:27:02.726147 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:27:02 crc kubenswrapper[4922]: I0930 01:27:02.828555 4922 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:27:02 crc kubenswrapper[4922]: I0930 01:27:02.898229 4922 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:27:03 crc kubenswrapper[4922]: I0930 01:27:03.792408 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-97gxp"] Sep 30 01:27:04 crc kubenswrapper[4922]: I0930 01:27:04.823774 4922 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-97gxp" podUID="af3655d9-2e97-400b-a17d-3419c26e9196" containerName="registry-server" containerID="cri-o://2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56" gracePeriod=2 Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.391704 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.475269 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqrwd\" (UniqueName: \"kubernetes.io/projected/af3655d9-2e97-400b-a17d-3419c26e9196-kube-api-access-dqrwd\") pod \"af3655d9-2e97-400b-a17d-3419c26e9196\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.475352 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-utilities\") pod \"af3655d9-2e97-400b-a17d-3419c26e9196\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.475540 4922 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-catalog-content\") pod \"af3655d9-2e97-400b-a17d-3419c26e9196\" (UID: \"af3655d9-2e97-400b-a17d-3419c26e9196\") " Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.478458 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-utilities" (OuterVolumeSpecName: "utilities") pod "af3655d9-2e97-400b-a17d-3419c26e9196" (UID: "af3655d9-2e97-400b-a17d-3419c26e9196"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.493747 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af3655d9-2e97-400b-a17d-3419c26e9196" (UID: "af3655d9-2e97-400b-a17d-3419c26e9196"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.503107 4922 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af3655d9-2e97-400b-a17d-3419c26e9196-kube-api-access-dqrwd" (OuterVolumeSpecName: "kube-api-access-dqrwd") pod "af3655d9-2e97-400b-a17d-3419c26e9196" (UID: "af3655d9-2e97-400b-a17d-3419c26e9196"). InnerVolumeSpecName "kube-api-access-dqrwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.578818 4922 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.578859 4922 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqrwd\" (UniqueName: \"kubernetes.io/projected/af3655d9-2e97-400b-a17d-3419c26e9196-kube-api-access-dqrwd\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.578876 4922 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3655d9-2e97-400b-a17d-3419c26e9196-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.839798 4922 generic.go:334] "Generic (PLEG): container finished" podID="af3655d9-2e97-400b-a17d-3419c26e9196" containerID="2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56" exitCode=0 Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.839872 4922 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-97gxp" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.839896 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-97gxp" event={"ID":"af3655d9-2e97-400b-a17d-3419c26e9196","Type":"ContainerDied","Data":"2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56"} Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.844455 4922 scope.go:117] "RemoveContainer" containerID="2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.844813 4922 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-97gxp" event={"ID":"af3655d9-2e97-400b-a17d-3419c26e9196","Type":"ContainerDied","Data":"5f6fa1df494877c494d172233cf79484f2ebf5b5a4127e6f63f45194cbb52ed0"} Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.887223 4922 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-97gxp"] Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.894091 4922 scope.go:117] "RemoveContainer" containerID="919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.899646 4922 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-97gxp"] Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.923008 4922 scope.go:117] "RemoveContainer" containerID="9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.989607 4922 scope.go:117] "RemoveContainer" containerID="2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56" Sep 30 01:27:05 crc kubenswrapper[4922]: E0930 01:27:05.990344 4922 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56\": container with ID starting with 2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56 not found: ID does not exist" containerID="2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.990423 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56"} err="failed to get container status \"2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56\": rpc error: code = NotFound desc = could not find container \"2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56\": container with ID starting with 2e9b91cbc60200e5586347b3a21739b378316ba4cca4d1aed770fa02cb0daa56 not found: ID does not exist" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.990518 4922 scope.go:117] "RemoveContainer" containerID="919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185" Sep 30 01:27:05 crc kubenswrapper[4922]: E0930 01:27:05.990999 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185\": container with ID starting with 919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185 not found: ID does not exist" containerID="919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.991060 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185"} err="failed to get container status \"919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185\": rpc error: code = NotFound desc = could not find container \"919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185\": container with ID starting with 919a57fdc5345dbf00c9560f907b46aaab544dcd36991e95fae2cd928d74d185 not found: ID does not exist" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.994857 4922 scope.go:117] "RemoveContainer" containerID="9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206" Sep 30 01:27:05 crc kubenswrapper[4922]: E0930 01:27:05.996524 4922 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206\": container with ID starting with 9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206 not found: ID does not exist" containerID="9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206" Sep 30 01:27:05 crc kubenswrapper[4922]: I0930 01:27:05.996571 4922 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206"} err="failed to get container status \"9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206\": rpc error: code = NotFound desc = could not find container \"9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206\": container with ID starting with 9d57248559e336df99e5540e9449ca93cde19d245176b93688d68b94a34cd206 not found: ID does not exist" Sep 30 01:27:06 crc kubenswrapper[4922]: I0930 01:27:06.455456 4922 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af3655d9-2e97-400b-a17d-3419c26e9196" path="/var/lib/kubelet/pods/af3655d9-2e97-400b-a17d-3419c26e9196/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066631033024450 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066631034017366 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066603277016521 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066603277015471 5ustar corecore